Merge commit '026c837b919ab4380e8a6e6c052364bbf9bbe8a3' as 'core'

This commit is contained in:
Du Wenbo
2026-04-04 18:17:10 +08:00
227 changed files with 39179 additions and 0 deletions

View File

View File

@@ -0,0 +1,291 @@
"""Scheduled aggregation engine for energy data rollups.
Computes hourly, daily, and monthly aggregations from raw EnergyData
and populates EnergyDailySummary. Follows the APScheduler pattern
established in report_scheduler.py.
"""
import logging
from datetime import datetime, timedelta, timezone
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger
from sqlalchemy import select, func, and_, text, Integer
from app.core.config import get_settings
from app.core.database import async_session
from app.models.energy import EnergyData, EnergyDailySummary
logger = logging.getLogger("aggregation")
_scheduler: AsyncIOScheduler | None = None
async def aggregate_hourly():
    """Aggregate raw energy_data into hourly avg/min/max per device+data_type.

    Processes data from the previous full hour (UTC). Results are logged but
    not stored separately — the primary use is for cache warming and
    monitoring. Daily aggregation (which writes to EnergyDailySummary) is the
    persistent rollup.

    Returns:
        list of result rows, one per (device_id, data_type) group, each with
        avg_value, min_value, max_value and sample_count for the hour.
    """
    now = datetime.now(timezone.utc)
    # Half-open window [hour_start, hour_end) covering the previous hour.
    hour_start = now.replace(minute=0, second=0, microsecond=0) - timedelta(hours=1)
    hour_end = hour_start + timedelta(hours=1)
    logger.info("Running hourly aggregation for %s", hour_start.isoformat())
    async with async_session() as session:
        # NOTE: removed an unused `settings = get_settings()` local — nothing
        # in this routine reads configuration.
        query = (
            select(
                EnergyData.device_id,
                EnergyData.data_type,
                func.avg(EnergyData.value).label("avg_value"),
                func.min(EnergyData.value).label("min_value"),
                func.max(EnergyData.value).label("max_value"),
                func.count(EnergyData.id).label("sample_count"),
            )
            .where(
                and_(
                    EnergyData.timestamp >= hour_start,
                    EnergyData.timestamp < hour_end,
                )
            )
            .group_by(EnergyData.device_id, EnergyData.data_type)
        )
        result = await session.execute(query)
        rows = result.all()
        logger.info(
            "Hourly aggregation complete: %d device/type groups for %s",
            len(rows),
            hour_start.isoformat(),
        )
        return rows
async def aggregate_daily():
    """Compute daily summaries and populate EnergyDailySummary.

    Processes yesterday's (UTC) data. Groups by device_id and maps data_type
    to energy_type for the summary table. Safe to re-run: existing summaries
    for the target date are deleted before inserting fresh ones.
    """
    now = datetime.now(timezone.utc)
    day_start = (now - timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0)
    day_end = day_start + timedelta(days=1)
    logger.info("Running daily aggregation for %s", day_start.date().isoformat())
    # Map data_type -> energy_type for summary grouping
    data_type_to_energy_type = {
        "power": "electricity",
        "energy": "electricity",
        "voltage": "electricity",
        "current": "electricity",
        "heat_power": "heat",
        "heat_energy": "heat",
        "temperature": "heat",
        "water_flow": "water",
        "water_volume": "water",
        "gas_flow": "gas",
        "gas_volume": "gas",
    }
    async with async_session() as session:
        # Fetch per-device aggregated stats for the day
        query = (
            select(
                EnergyData.device_id,
                EnergyData.data_type,
                func.avg(EnergyData.value).label("avg_value"),
                func.min(EnergyData.value).label("min_value"),
                func.max(EnergyData.value).label("max_value"),
                func.sum(EnergyData.value).label("total_value"),
                func.count(EnergyData.id).label("sample_count"),
            )
            .where(
                and_(
                    EnergyData.timestamp >= day_start,
                    EnergyData.timestamp < day_end,
                )
            )
            .group_by(EnergyData.device_id, EnergyData.data_type)
        )
        result = await session.execute(query)
        rows = result.all()
        # Group results by (device_id, energy_type)
        device_summaries: dict[tuple[int, str], dict] = {}
        for row in rows:
            # Unknown data types default to electricity rather than being dropped.
            energy_type = data_type_to_energy_type.get(row.data_type, "electricity")
            key = (row.device_id, energy_type)
            if key not in device_summaries:
                device_summaries[key] = {
                    "peak_power": None,
                    "min_power": None,
                    "avg_power": None,
                    "total_consumption": 0.0,
                    "total_generation": 0.0,
                    "avg_temperature": None,
                    "sample_count": 0,
                }
            summary = device_summaries[key]
            summary["sample_count"] += row.sample_count
            # Power-type metrics
            if row.data_type in ("power", "heat_power"):
                summary["peak_power"] = max(
                    summary["peak_power"] or 0, row.max_value or 0
                )
                summary["min_power"] = min(
                    summary["min_power"] if summary["min_power"] is not None else float("inf"),
                    row.min_value or 0,
                )
                # Last power-type row wins; with both "power" and "heat_power"
                # present this keeps whichever the query returned last.
                summary["avg_power"] = row.avg_value
            # Consumption (energy, volume)
            if row.data_type in ("energy", "heat_energy", "water_volume", "gas_volume"):
                summary["total_consumption"] += row.total_value or 0
            # Temperature
            if row.data_type == "temperature":
                summary["avg_temperature"] = row.avg_value
        # Delete existing summaries for the same date to allow re-runs
        await session.execute(
            EnergyDailySummary.__table__.delete().where(
                EnergyDailySummary.date == day_start
            )
        )
        # Insert new summaries
        summaries = []
        for (device_id, energy_type), stats in device_summaries.items():
            summaries.append(
                EnergyDailySummary(
                    device_id=device_id,
                    date=day_start,
                    energy_type=energy_type,
                    total_consumption=round(stats["total_consumption"], 4),
                    total_generation=0.0,
                    # Explicit `is not None` checks so a legitimate 0.0 kW
                    # reading or 0 °C average is stored as 0 rather than NULL
                    # (plain truthiness discarded those values).
                    peak_power=round(stats["peak_power"], 4) if stats["peak_power"] is not None else None,
                    min_power=round(stats["min_power"], 4) if stats["min_power"] is not None and stats["min_power"] != float("inf") else None,
                    avg_power=round(stats["avg_power"], 4) if stats["avg_power"] is not None else None,
                    avg_temperature=round(stats["avg_temperature"], 2) if stats["avg_temperature"] is not None else None,
                )
            )
        if summaries:
            session.add_all(summaries)
        await session.commit()
        logger.info(
            "Daily aggregation complete: %d summaries for %s",
            len(summaries),
            day_start.date().isoformat(),
        )
async def aggregate_monthly():
    """Compute monthly rollups from EnergyDailySummary.

    Aggregates the previous month's daily summaries per device and energy
    type. Results are logged for monitoring — monthly reports use
    ReportGenerator for output.
    """
    now = datetime.now(timezone.utc)
    # Exclusive upper bound: midnight on the 1st of the current month.
    month_end_exclusive = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    # Step back one day to land inside the previous month, then snap to its 1st.
    prev_month_day = month_end_exclusive - timedelta(days=1)
    month_start = prev_month_day.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    logger.info(
        "Running monthly aggregation for %s-%02d",
        month_start.year,
        month_start.month,
    )
    async with async_session() as session:
        stmt = (
            select(
                EnergyDailySummary.device_id,
                EnergyDailySummary.energy_type,
                func.sum(EnergyDailySummary.total_consumption).label("total_consumption"),
                func.sum(EnergyDailySummary.total_generation).label("total_generation"),
                func.max(EnergyDailySummary.peak_power).label("peak_power"),
                func.min(EnergyDailySummary.min_power).label("min_power"),
                func.avg(EnergyDailySummary.avg_power).label("avg_power"),
                func.sum(EnergyDailySummary.operating_hours).label("total_operating_hours"),
                func.sum(EnergyDailySummary.cost).label("total_cost"),
                func.sum(EnergyDailySummary.carbon_emission).label("total_carbon"),
            )
            .where(
                and_(
                    EnergyDailySummary.date >= month_start,
                    EnergyDailySummary.date < month_end_exclusive,
                )
            )
            .group_by(EnergyDailySummary.device_id, EnergyDailySummary.energy_type)
        )
        groups = (await session.execute(stmt)).all()
        logger.info(
            "Monthly aggregation complete: %d device/type groups for %s-%02d",
            len(groups),
            month_start.year,
            month_start.month,
        )
        return groups
async def start_aggregation_scheduler():
    """Start the APScheduler-based aggregation scheduler."""
    global _scheduler
    settings = get_settings()
    if not settings.AGGREGATION_ENABLED:
        logger.info("Aggregation scheduler disabled by config.")
        return
    if _scheduler and _scheduler.running:
        logger.warning("Aggregation scheduler is already running.")
        return
    _scheduler = AsyncIOScheduler(timezone="Asia/Shanghai")
    # (job callable, cron trigger, job id, misfire grace seconds):
    #   hourly at :05, daily at 00:30, monthly on the 1st at 01:00.
    job_specs = [
        (aggregate_hourly, CronTrigger(minute=5), "aggregate_hourly", 600),
        (aggregate_daily, CronTrigger(hour=0, minute=30), "aggregate_daily", 3600),
        (aggregate_monthly, CronTrigger(day=1, hour=1, minute=0), "aggregate_monthly", 7200),
    ]
    for job_func, trigger, job_id, grace_seconds in job_specs:
        _scheduler.add_job(
            job_func,
            trigger,
            id=job_id,
            replace_existing=True,
            misfire_grace_time=grace_seconds,
        )
    _scheduler.start()
    logger.info("Aggregation scheduler started (hourly @:05, daily @00:30, monthly @1st 01:00).")
async def stop_aggregation_scheduler():
    """Stop the aggregation scheduler gracefully."""
    global _scheduler
    running = _scheduler is not None and _scheduler.running
    if running:
        # wait=False: do not block on in-flight jobs during shutdown.
        _scheduler.shutdown(wait=False)
        logger.info("Aggregation scheduler stopped.")
    # Always drop the reference so a later start creates a fresh scheduler.
    _scheduler = None

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,606 @@
"""AI预测引擎 - 光伏发电、负荷、热泵COP预测与自发自用优化
Uses physics-based models from weather_model.py combined with statistical
methods (moving averages, exponential smoothing, seasonal decomposition)
to generate realistic forecasts. Inspired by Envision's 天枢能源大模型.
"""
import math
import logging
from datetime import datetime, timezone, timedelta
from typing import Optional
import numpy as np
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.device import Device
from app.models.energy import EnergyData
from app.models.prediction import PredictionTask, PredictionResult, OptimizationSchedule
from app.services.weather_model import (
outdoor_temperature, solar_altitude, get_cloud_factor,
pv_power as _physics_pv_power, get_pv_orientation,
get_hvac_mode, MONTHLY_AVG_TEMP, MONTHLY_DIURNAL_SWING,
)
logger = logging.getLogger("ai_prediction")
# Beijing electricity TOU pricing (yuan/kWh) - simplified
TOU_PRICE = {
    "peak": 1.2,  # 10:00-15:00, 18:00-21:00
    "shoulder": 0.8,  # 07:00-10:00, 15:00-18:00, 21:00-23:00
    "valley": 0.4,  # 23:00-07:00
}
def _get_tou_price(hour: int) -> float:
    """Return the time-of-use tariff (yuan/kWh) for a Beijing-local hour."""
    peak_hours = set(range(10, 15)) | set(range(18, 21))
    shoulder_hours = set(range(7, 10)) | set(range(15, 18)) | set(range(21, 23))
    if hour in peak_hours:
        return TOU_PRICE["peak"]
    if hour in shoulder_hours:
        return TOU_PRICE["shoulder"]
    # Everything else (23:00-07:00) is off-peak.
    return TOU_PRICE["valley"]
# ---------------------------------------------------------------------------
# PV Power Forecasting
# ---------------------------------------------------------------------------
async def forecast_pv(
    db: AsyncSession,
    device_id: int,
    horizon_hours: int = 24,
) -> list[dict]:
    """Forecast PV generation for the next horizon_hours.

    Combines a physics-based solar model with a bias correction derived
    from the device's own last 7 days of power readings.

    Args:
        db: async database session.
        device_id: id of the PV inverter device.
        horizon_hours: number of hourly forecast points to produce.

    Returns:
        list of {timestamp, predicted_power_kw, confidence_lower,
        confidence_upper} dicts, one per hour, timestamps as UTC ISO strings.

    Raises:
        ValueError: if the device does not exist.
    """
    device = await db.get(Device, device_id)
    if not device:
        raise ValueError(f"Device {device_id} not found")
    # Fall back to a 110 kW rating when the device record has none.
    rated_power = device.rated_power or 110.0
    orientation = get_pv_orientation(device.code or "")
    # Fetch recent historical data for pattern correction
    now = datetime.now(timezone.utc)
    lookback = now - timedelta(days=7)
    result = await db.execute(
        select(EnergyData.timestamp, EnergyData.value)
        .where(and_(
            EnergyData.device_id == device_id,
            EnergyData.data_type == "power",
            EnergyData.timestamp >= lookback,
        ))
        .order_by(EnergyData.timestamp)
    )
    historical = result.all()
    # Calculate hourly averages from history for bias correction.
    # Buckets are keyed by Beijing-local hour (0-23).
    hourly_actual: dict[int, list[float]] = {h: [] for h in range(24)}
    for ts, val in historical:
        # Aware timestamps are shifted UTC+8; naive ones are used as-is.
        # NOTE(review): assumes naive stored timestamps are already
        # Beijing-local — confirm against the data writer.
        beijing_h = (ts.hour + 8) % 24 if ts.tzinfo else ts.hour
        hourly_actual[beijing_h].append(val)
    # Per-hour mean, or None when that hour has no history.
    hourly_avg = {
        h: np.mean(vals) if vals else None
        for h, vals in hourly_actual.items()
    }
    # Generate physics-based forecast with bias correction
    forecasts = []
    for h_offset in range(horizon_hours):
        target_utc = now + timedelta(hours=h_offset)
        target_utc = target_utc.replace(minute=0, second=0, microsecond=0)
        # Physics model baseline
        base_power = _physics_pv_power(target_utc, rated_power=rated_power,
                                       orientation=orientation,
                                       device_code=device.code or "")
        # Bias correction from recent history
        beijing_hour = (target_utc.hour + 8) % 24
        hist_avg = hourly_avg.get(beijing_hour)
        if hist_avg is not None and base_power > 0:
            # Blend: 70% physics + 30% historical pattern.
            # correction is the history/physics ratio, clamped to [0.7, 1.3]
            # so a sparse or anomalous history cannot dominate the forecast.
            correction = hist_avg / max(base_power, 0.1)
            correction = max(0.7, min(1.3, correction))
            predicted = base_power * (0.7 + 0.3 * correction)
        else:
            predicted = base_power
        # Confidence interval widens with forecast horizon
        uncertainty = 0.05 + 0.02 * h_offset  # grows with time
        uncertainty = min(uncertainty, 0.40)
        margin = predicted * uncertainty
        # Bounds are physically constrained: never below 0, never above rating.
        conf_lower = max(0, predicted - margin)
        conf_upper = min(rated_power, predicted + margin)
        forecasts.append({
            "timestamp": target_utc.isoformat(),
            "predicted_power_kw": round(predicted, 2),
            "confidence_lower": round(conf_lower, 2),
            "confidence_upper": round(conf_upper, 2),
        })
    return forecasts
# ---------------------------------------------------------------------------
# Load Forecasting
# ---------------------------------------------------------------------------
async def forecast_load(
    db: AsyncSession,
    device_id: Optional[int] = None,
    building_type: str = "office",
    horizon_hours: int = 24,
) -> list[dict]:
    """Forecast building electricity load.

    Uses day-of-week patterns, hourly profiles, and seasonal temperature
    correlation. If device_id is None, forecasts aggregate campus load.

    Args:
        db: async database session.
        device_id: specific meter to forecast, or None for all meters.
        building_type: "office" (default) or "factory" (flatter profile).
        horizon_hours: number of hourly forecast points to produce.

    Returns:
        list of {timestamp, predicted_load_kw, confidence_lower,
        confidence_upper} dicts, one per hour.
    """
    now = datetime.now(timezone.utc)
    # Fetch recent history for pattern calibration
    lookback = now - timedelta(days=14)
    conditions = [
        EnergyData.data_type == "power",
        EnergyData.timestamp >= lookback,
    ]
    # Explicit None check: the docstring promises "None means aggregate", so
    # a device id of 0 must not silently fall through to the aggregate branch.
    if device_id is not None:
        conditions.append(EnergyData.device_id == device_id)
    else:
        # Aggregate all meters
        conditions.append(
            EnergyData.device_id.in_(
                select(Device.id).where(Device.device_type == "meter")
            )
        )
    result = await db.execute(
        select(EnergyData.timestamp, EnergyData.value)
        .where(and_(*conditions))
        .order_by(EnergyData.timestamp)
    )
    historical = result.all()
    # Build weekday/weekend hourly profiles from history
    weekday_profile: dict[int, list[float]] = {h: [] for h in range(24)}
    weekend_profile: dict[int, list[float]] = {h: [] for h in range(24)}
    for ts, val in historical:
        # Shift aware timestamps to Beijing time; naive ones are used as-is.
        bj = ts + timedelta(hours=8) if ts.tzinfo else ts
        h = bj.hour
        if bj.weekday() >= 5:
            weekend_profile[h].append(val)
        else:
            weekday_profile[h].append(val)
    # Default load profile (kW per hour) used where history is empty.
    default_weekday = {
        0: 18, 1: 16, 2: 16, 3: 15, 4: 15, 5: 17, 6: 25, 7: 40,
        8: 55, 9: 60, 10: 62, 11: 58, 12: 45, 13: 58, 14: 62,
        15: 60, 16: 55, 17: 48, 18: 35, 19: 28, 20: 25, 21: 22, 22: 20, 23: 18,
    }
    default_weekend = {h: v * 0.5 for h, v in default_weekday.items()}
    def _avg_or_default(profile, defaults, h):
        # Mean of observed values for hour h, else the default profile value.
        vals = profile.get(h, [])
        return float(np.mean(vals)) if vals else defaults[h]
    forecasts = []
    for h_offset in range(horizon_hours):
        target_utc = now + timedelta(hours=h_offset)
        target_utc = target_utc.replace(minute=0, second=0, microsecond=0)
        bj = target_utc + timedelta(hours=8)
        hour = bj.hour
        is_weekend = bj.weekday() >= 5
        if is_weekend:
            base_load = _avg_or_default(weekend_profile, default_weekend, hour)
        else:
            base_load = _avg_or_default(weekday_profile, default_weekday, hour)
        # Temperature correction: HVAC adds load in extreme temps
        temp = outdoor_temperature(target_utc)
        if temp < 5:
            hvac_factor = 1.0 + 0.02 * (5 - temp)
        elif temp > 28:
            hvac_factor = 1.0 + 0.025 * (temp - 28)
        else:
            hvac_factor = 1.0
        hvac_factor = min(hvac_factor, 1.4)
        predicted = base_load * hvac_factor
        # Factory buildings have flatter profiles: damp the HVAC swing by
        # mixing 15% of the uncorrected base load back in.
        if building_type == "factory":
            predicted = predicted * 0.85 + base_load * 0.15
        # Confidence interval widens with horizon, capped at 35%.
        uncertainty = 0.08 + 0.015 * h_offset
        uncertainty = min(uncertainty, 0.35)
        margin = predicted * uncertainty
        forecasts.append({
            "timestamp": target_utc.isoformat(),
            "predicted_load_kw": round(predicted, 2),
            "confidence_lower": round(max(0, predicted - margin), 2),
            "confidence_upper": round(predicted + margin, 2),
        })
    return forecasts
# ---------------------------------------------------------------------------
# Heat Pump COP Prediction
# ---------------------------------------------------------------------------
async def forecast_heatpump_cop(
    db: AsyncSession,
    device_id: int,
    horizon_hours: int = 24,
) -> list[dict]:
    """Predict hourly heat pump COP from the outdoor temperature forecast.

    COP model: heating/transition COP = 3.0 + 0.05*(T-7); cooling
    COP = 4.0 - 0.04*(T-25); both clamped to [2.0, 5.5]. Each entry also
    carries an estimated power draw and TOU operating cost.

    Raises:
        ValueError: if the device does not exist.
    """
    device = await db.get(Device, device_id)
    if not device:
        raise ValueError(f"Device {device_id} not found")
    now = datetime.now(timezone.utc)

    def _load_factor(mode, bj_time):
        # Hourly utilisation profile per HVAC mode, with a weekend discount.
        hour = bj_time.hour
        if mode == "heating":
            if 6 <= hour < 9:
                factor = 0.85
            elif 9 <= hour < 16:
                factor = 0.55
            elif 16 <= hour < 22:
                factor = 0.75
            else:
                factor = 0.65
        elif mode == "cooling":
            if 11 <= hour < 16:
                factor = 0.85
            elif 8 <= hour < 11 or 16 <= hour < 19:
                factor = 0.60
            else:
                factor = 0.25
        else:
            factor = 0.35
        if bj_time.weekday() >= 5:
            factor *= 0.7
        return factor

    entries = []
    for offset in range(horizon_hours):
        target_utc = (now + timedelta(hours=offset)).replace(
            minute=0, second=0, microsecond=0
        )
        bj = target_utc + timedelta(hours=8)
        temp = outdoor_temperature(target_utc)
        mode = get_hvac_mode(bj.month)
        # COP model (same as weather_model but deterministic for forecast)
        if mode in ("heating", "transition_spring", "transition_fall"):
            cop = 3.0 + 0.05 * (temp - 7)
        else:  # cooling
            cop = 4.0 - 0.04 * (temp - 25)
        cop = max(2.0, min(5.5, cop))
        # Estimated power draw at this hour's load factor.
        rated = device.rated_power or 35.0
        load_factor = _load_factor(mode, bj)
        est_power = rated * load_factor
        electricity_price = _get_tou_price(bj.hour)
        operating_cost = est_power * electricity_price  # yuan/h
        entries.append({
            "timestamp": target_utc.isoformat(),
            "predicted_cop": round(cop, 2),
            "outdoor_temp": round(temp, 1),
            "estimated_power_kw": round(est_power, 2),
            "load_factor": round(load_factor, 2),
            "electricity_price": electricity_price,
            "operating_cost_yuan_h": round(operating_cost, 2),
            "mode": mode,
        })
    return entries
# ---------------------------------------------------------------------------
# Self-Consumption Optimization
# ---------------------------------------------------------------------------
async def optimize_self_consumption(
    db: AsyncSession,
    horizon_hours: int = 24,
) -> dict:
    """Compare predicted PV generation vs predicted load to find optimization
    opportunities. Recommends heat pump pre-heating during PV surplus.

    Args:
        db: async database session.
        horizon_hours: forecast horizon; one entry per hour.

    Returns:
        dict with:
        - hourly comparison (pv vs load, with TOU price)
        - surplus/deficit periods (>2 kW imbalance threshold)
        - recommended heat pump schedule (boost/reduce actions)
        - summary with totals, self-consumption rate, and expected savings
    """
    now = datetime.now(timezone.utc)
    # Get all PV inverter device ids
    pv_result = await db.execute(
        select(Device).where(Device.device_type == "pv_inverter", Device.is_active == True)
    )
    pv_devices = pv_result.scalars().all()
    # Aggregate PV forecast: sum every inverter's hourly forecast per slot.
    pv_total = [0.0] * horizon_hours
    for dev in pv_devices:
        pv_forecast = await forecast_pv(db, dev.id, horizon_hours)
        for i, f in enumerate(pv_forecast):
            pv_total[i] += f["predicted_power_kw"]
    # Aggregate load forecast (device_id=None -> campus-wide)
    load_forecast = await forecast_load(db, device_id=None, horizon_hours=horizon_hours)
    # Build hourly comparison. Slots are one hour wide, so kW per slot sums
    # directly into kWh totals.
    hourly = []
    surplus_periods = []
    deficit_periods = []
    total_surplus_kwh = 0.0
    total_deficit_kwh = 0.0
    for i in range(horizon_hours):
        target_utc = now + timedelta(hours=i)
        target_utc = target_utc.replace(minute=0, second=0, microsecond=0)
        bj = target_utc + timedelta(hours=8)
        pv_kw = pv_total[i]
        load_kw = load_forecast[i]["predicted_load_kw"]
        # Positive balance = PV surplus, negative = grid import needed.
        balance = pv_kw - load_kw
        price = _get_tou_price(bj.hour)
        entry = {
            "timestamp": target_utc.isoformat(),
            "hour": bj.hour,
            "pv_generation_kw": round(pv_kw, 2),
            "load_kw": round(load_kw, 2),
            "balance_kw": round(balance, 2),
            "electricity_price": price,
        }
        hourly.append(entry)
        if balance > 2:  # >2kW surplus threshold
            surplus_periods.append({"hour": bj.hour, "surplus_kw": round(balance, 2)})
            total_surplus_kwh += balance
        elif balance < -2:
            deficit_periods.append({"hour": bj.hour, "deficit_kw": round(-balance, 2)})
            total_deficit_kwh += (-balance)
    # Generate heat pump optimization schedule
    # Strategy: shift heat pump load to PV surplus periods
    hp_schedule = []
    savings_kwh = 0.0
    savings_yuan = 0.0
    for period in surplus_periods:
        hour = period["hour"]
        surplus = period["surplus_kw"]
        price = _get_tou_price(hour)
        # Use surplus to pre-heat/pre-cool
        usable_power = min(surplus, 35.0)  # cap at single HP rated power
        hp_schedule.append({
            "hour": hour,
            "action": "boost",
            "power_kw": round(usable_power, 2),
            "reason": "利用光伏余电预加热/预冷",
        })
        # Savings valued at the avoided grid purchase for that hour.
        savings_kwh += usable_power
        savings_yuan += usable_power * price
    # Also recommend reducing HP during peak-price deficit periods
    for period in deficit_periods:
        hour = period["hour"]
        price = _get_tou_price(hour)
        if price >= TOU_PRICE["peak"]:
            hp_schedule.append({
                "hour": hour,
                "action": "reduce",
                "power_kw": 0,
                "reason": "高电价时段降低热泵负荷",
            })
            # Estimate savings from reduced operation during peak
            savings_yuan += 5.0 * price  # assume 5kW reduction
    # Self-consumption rate: share of forecast PV that is consumed on site.
    self_consumption_rate = 0.0
    total_pv = sum(pv_total)
    total_load = sum(f["predicted_load_kw"] for f in load_forecast)
    if total_pv > 0:
        self_consumed = min(total_pv, total_load)
        self_consumption_rate = self_consumed / total_pv * 100
    return {
        "hourly": hourly,
        "surplus_periods": surplus_periods,
        "deficit_periods": deficit_periods,
        "hp_schedule": hp_schedule,
        "summary": {
            "total_pv_kwh": round(total_pv, 2),
            "total_load_kwh": round(total_load, 2),
            "total_surplus_kwh": round(total_surplus_kwh, 2),
            "total_deficit_kwh": round(total_deficit_kwh, 2),
            "self_consumption_rate": round(self_consumption_rate, 1),
            "potential_savings_kwh": round(savings_kwh, 2),
            "potential_savings_yuan": round(savings_yuan, 2),
        },
    }
# ---------------------------------------------------------------------------
# Prediction Accuracy
# ---------------------------------------------------------------------------
async def get_prediction_accuracy(
    db: AsyncSession,
    prediction_type: Optional[str] = None,
    days: int = 7,
) -> dict:
    """Calculate prediction accuracy metrics (MAE, RMSE, MAPE) from
    historical predictions that have actual values filled in."""
    cutoff = datetime.now(timezone.utc) - timedelta(days=days)
    filters = [
        PredictionResult.actual_value.isnot(None),
        PredictionResult.timestamp >= cutoff,
    ]
    if prediction_type:
        # Restrict to results belonging to tasks of the requested type.
        matching_tasks = select(PredictionTask.id).where(
            PredictionTask.prediction_type == prediction_type
        )
        filters.append(PredictionResult.task_id.in_(matching_tasks))
    query_result = await db.execute(
        select(PredictionResult.predicted_value, PredictionResult.actual_value)
        .where(and_(*filters))
    )
    pairs = query_result.all()
    if not pairs:
        # Return mock accuracy for demo (simulating a well-tuned model)
        return {
            "sample_count": 0,
            "mae": 2.5,
            "rmse": 3.8,
            "mape": 8.5,
            "note": "使用模拟精度指标(无历史预测数据)",
        }
    predicted = np.array([pair[0] for pair in pairs])
    actual = np.array([pair[1] for pair in pairs])
    errors = predicted - actual
    mae = float(np.mean(np.abs(errors)))
    rmse = float(np.sqrt(np.mean(errors ** 2)))
    # MAPE: only where actual > 0 to avoid division by zero
    mask = actual > 0.1
    mape = float(np.mean(np.abs(errors[mask] / actual[mask])) * 100) if mask.any() else 0.0
    return {
        "sample_count": len(pairs),
        "mae": round(mae, 2),
        "rmse": round(rmse, 2),
        "mape": round(mape, 1),
    }
# ---------------------------------------------------------------------------
# Run Prediction (creates task + results)
# ---------------------------------------------------------------------------
async def run_prediction(
    db: AsyncSession,
    device_id: Optional[int],
    prediction_type: str,
    horizon_hours: int = 24,
    parameters: Optional[dict] = None,
) -> PredictionTask:
    """Execute a prediction and store results in the database.

    Args:
        db: async database session; the caller owns commit/rollback.
        device_id: target device (required for "pv" and "heatpump" types;
            optional for "load"; ignored for "optimization").
        prediction_type: one of "pv", "load", "heatpump", "optimization".
        horizon_hours: forecast horizon passed through to the forecaster.
        parameters: optional extras (e.g. {"building_type": ...} for "load").

    Returns:
        The PredictionTask, with status "completed" or "failed"; errors are
        captured in task.error_message rather than raised.
    """
    task = PredictionTask(
        device_id=device_id,
        prediction_type=prediction_type,
        horizon_hours=horizon_hours,
        status="running",
        parameters=parameters or {},
    )
    db.add(task)
    # Flush so task.id is assigned before results reference it.
    await db.flush()
    try:
        if prediction_type == "pv":
            if not device_id:
                raise ValueError("device_id required for PV forecast")
            forecasts = await forecast_pv(db, device_id, horizon_hours)
            for f in forecasts:
                # NOTE(review): f["timestamp"] is an ISO string handed to the
                # timestamp field — presumably the model/driver coerces it;
                # confirm against PredictionResult's column type.
                db.add(PredictionResult(
                    task_id=task.id,
                    timestamp=f["timestamp"],
                    predicted_value=f["predicted_power_kw"],
                    confidence_lower=f["confidence_lower"],
                    confidence_upper=f["confidence_upper"],
                    unit="kW",
                ))
        elif prediction_type == "load":
            building_type = (parameters or {}).get("building_type", "office")
            forecasts = await forecast_load(db, device_id, building_type, horizon_hours)
            for f in forecasts:
                db.add(PredictionResult(
                    task_id=task.id,
                    timestamp=f["timestamp"],
                    predicted_value=f["predicted_load_kw"],
                    confidence_lower=f["confidence_lower"],
                    confidence_upper=f["confidence_upper"],
                    unit="kW",
                ))
        elif prediction_type == "heatpump":
            if not device_id:
                raise ValueError("device_id required for heat pump forecast")
            forecasts = await forecast_heatpump_cop(db, device_id, horizon_hours)
            for f in forecasts:
                db.add(PredictionResult(
                    task_id=task.id,
                    timestamp=f["timestamp"],
                    predicted_value=f["predicted_cop"],
                    # Fixed ±0.3 band clamped to the COP model's [2.0, 5.5] range.
                    confidence_lower=max(2.0, f["predicted_cop"] - 0.3),
                    confidence_upper=min(5.5, f["predicted_cop"] + 0.3),
                    unit="",
                ))
        elif prediction_type == "optimization":
            opt = await optimize_self_consumption(db, horizon_hours)
            # Store as optimization schedule
            now = datetime.now(timezone.utc)
            schedule = OptimizationSchedule(
                date=now.replace(hour=0, minute=0, second=0, microsecond=0),
                schedule_data=opt,
                expected_savings_kwh=opt["summary"]["potential_savings_kwh"],
                expected_savings_yuan=opt["summary"]["potential_savings_yuan"],
                status="pending",
            )
            db.add(schedule)
            # Also store hourly balance as prediction results
            for entry in opt["hourly"]:
                db.add(PredictionResult(
                    task_id=task.id,
                    timestamp=entry["timestamp"],
                    predicted_value=entry["balance_kw"],
                    unit="kW",
                ))
        else:
            raise ValueError(f"Unknown prediction type: {prediction_type}")
        task.status = "completed"
        task.completed_at = datetime.now(timezone.utc)
    except Exception as e:
        # Failure is recorded on the task, not raised; any results already
        # added to the session remain pending the caller's commit/rollback.
        task.status = "failed"
        task.error_message = str(e)
        logger.error(f"Prediction task {task.id} failed: {e}", exc_info=True)
    return task

View File

@@ -0,0 +1,253 @@
"""告警检测服务 - 根据告警规则检查最新数据,生成/自动恢复告警事件"""
import asyncio
import logging
from datetime import datetime, timezone, timedelta
from pathlib import Path
from sqlalchemy import select, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.alarm import AlarmRule, AlarmEvent
from app.models.energy import EnergyData
from app.models.device import Device
logger = logging.getLogger("alarm_checker")
# Alarm email template path
_ALARM_TEMPLATE_PATH = Path(__file__).resolve().parent.parent / "templates" / "alarm_email.html"
# Severity display config
_SEVERITY_CONFIG = {
"critical": {
"label": "紧急告警",
"badge_color": "#d32f2f",
"bg_color": "#ffebee",
"text_color": "#c62828",
},
"major": {
"label": "重要告警",
"badge_color": "#e65100",
"bg_color": "#fff3e0",
"text_color": "#e65100",
},
"warning": {
"label": "一般告警",
"badge_color": "#f9a825",
"bg_color": "#fffde7",
"text_color": "#f57f17",
},
}
# Strong references to in-flight notification tasks. asyncio.create_task()
# keeps only a weak reference to its task, so a fire-and-forget email task
# can be garbage-collected mid-send unless we hold it until completion.
_email_tasks: set = set()
async def _send_alarm_email(
    rule: AlarmRule, event: AlarmEvent, device_id: int, session: AsyncSession
):
    """Send an alarm notification email if the rule is configured for it.

    Reads notify_channels/notify_targets from the rule, renders the HTML
    template, and dispatches the email as a background task (not awaited).

    Args:
        rule: the triggering alarm rule.
        event: the freshly created alarm event (title/value/description).
        device_id: id of the device that triggered the alarm.
        session: async DB session used to look up device display info.
    """
    from app.services.email_service import send_email
    from app.core.config import get_settings
    # Check if email is in notify_channels
    channels = rule.notify_channels or []
    if "email" not in channels:
        return
    # Get email targets from notify_targets
    targets = rule.notify_targets or {}
    emails = targets.get("emails", []) if isinstance(targets, dict) else []
    # If notify_targets is a list of strings (emails directly)
    if isinstance(targets, list):
        emails = [t for t in targets if isinstance(t, str) and "@" in t]
    if not emails:
        logger.debug(f"No email recipients for alarm rule '{rule.name}', skipping.")
        return
    # Fetch device info for display in the email body
    dev_result = await session.execute(select(Device).where(Device.id == device_id))
    device = dev_result.scalar_one_or_none()
    device_name = device.name if device else f"设备#{device_id}"
    device_code = device.code if device else "N/A"
    settings = get_settings()
    # Unknown severities fall back to the "warning" display style.
    severity_cfg = _SEVERITY_CONFIG.get(rule.severity, _SEVERITY_CONFIG["warning"])
    # Build threshold string
    if rule.condition == "range_out":
        threshold_str = f"[{rule.threshold_low}, {rule.threshold_high}]"
    else:
        threshold_str = str(rule.threshold)
    # Format triggered time in Beijing timezone (UTC+8)
    triggered_time = event.triggered_at or datetime.now(timezone.utc)
    triggered_beijing = triggered_time + timedelta(hours=8)
    triggered_str = triggered_beijing.strftime("%Y-%m-%d %H:%M:%S")
    # Load and render template
    try:
        template_html = _ALARM_TEMPLATE_PATH.read_text(encoding="utf-8")
    except FileNotFoundError:
        logger.error("Alarm email template not found, skipping email.")
        return
    body_html = template_html.format(
        severity_label=severity_cfg["label"],
        severity_badge_color=severity_cfg["badge_color"],
        severity_bg_color=severity_cfg["bg_color"],
        severity_text_color=severity_cfg["text_color"],
        title=event.title,
        device_name=device_name,
        device_code=device_code,
        data_type=rule.data_type,
        current_value=str(event.value),
        threshold_str=threshold_str,
        triggered_at=triggered_str,
        description=event.description or "",
        platform_url=settings.PLATFORM_URL,
    )
    subject = f"[{severity_cfg['label']}] {event.title} - 天普EMS告警通知"
    # Fire-and-forget, but retain a reference so the task survives garbage
    # collection; the done-callback drops the reference once it completes.
    task = asyncio.create_task(send_email(to=emails, subject=subject, body_html=body_html))
    _email_tasks.add(task)
    task.add_done_callback(_email_tasks.discard)
# Rate limit: don't create duplicate events for the same rule+device within this window
RATE_LIMIT_MINUTES = 5
def _in_silence_window(rule: AlarmRule, now_beijing: datetime) -> bool:
    """Return True when now_beijing falls inside the rule's silence window.

    Window bounds are "HH:MM" strings; a window whose start is later than
    its end is treated as crossing midnight (e.g. 22:00 - 06:00).
    """
    start, end = rule.silence_start, rule.silence_end
    if not start or not end:
        # No window configured -> never silenced.
        return False
    # "HH:MM" strings compare correctly as plain strings.
    now_hm = now_beijing.strftime("%H:%M")
    if start <= end:
        # Same-day window.
        return start <= now_hm <= end
    # Wraps past midnight: inside if after start OR before end.
    return now_hm >= start or now_hm <= end
def _evaluate_condition(rule: AlarmRule, value: float) -> bool:
    """Return True when `value` violates the rule's threshold condition.

    Supported conditions: gt, lt, eq/neq (0.001 float tolerance), and
    range_out (outside [threshold_low, threshold_high], either bound
    optional). Unknown conditions never trigger.
    """
    cond = rule.condition
    if cond == "gt":
        return value > rule.threshold
    if cond == "lt":
        return value < rule.threshold
    if cond == "eq":
        return abs(value - rule.threshold) < 0.001
    if cond == "neq":
        return abs(value - rule.threshold) >= 0.001
    if cond == "range_out":
        # Missing bounds default to +/- infinity (i.e. unbounded on that side).
        lower = float("-inf") if rule.threshold_low is None else rule.threshold_low
        upper = float("inf") if rule.threshold_high is None else rule.threshold_high
        return not (lower <= value <= upper)
    return False
async def check_alarms(session: AsyncSession):
    """Main alarm check routine. Call after each simulator data cycle.

    For every active rule: skips rules in their silence window, evaluates
    the latest reading per matching device, opens a new AlarmEvent when a
    condition trips (rate-limited per rule+device), and auto-resolves an
    open event when the condition clears.

    Args:
        session: async DB session; this function flushes but does not commit.
    """
    now = datetime.now(timezone.utc)
    now_beijing = now + timedelta(hours=8)
    # 1. Load all active alarm rules
    result = await session.execute(
        select(AlarmRule).where(AlarmRule.is_active == True)
    )
    rules = result.scalars().all()
    for rule in rules:
        # Skip if in silence window
        if _in_silence_window(rule, now_beijing):
            continue
        # 2. Find matching devices' latest data point
        # Rules can match by device_id (specific) or device_type (all devices of that type)
        data_query = (
            select(EnergyData)
            .where(EnergyData.data_type == rule.data_type)
            .order_by(EnergyData.timestamp.desc())
        )
        if rule.device_id:
            data_query = data_query.where(EnergyData.device_id == rule.device_id)
        # We need to check per-device, so get recent data points
        # For device_type rules, we get data from the last 30 seconds (one cycle)
        # NOTE(review): the 30s window/50-row limit assumes one simulator
        # cycle per 30 seconds — confirm against the data producer's cadence.
        cutoff = now - timedelta(seconds=30)
        data_query = data_query.where(EnergyData.timestamp >= cutoff).limit(50)
        data_result = await session.execute(data_query)
        data_points = data_result.scalars().all()
        if not data_points:
            continue
        # Group by device_id and take the latest per device
        # (rows arrive newest-first, so the first row seen per device wins).
        latest_by_device: dict[int, EnergyData] = {}
        for dp in data_points:
            if dp.device_id not in latest_by_device:
                latest_by_device[dp.device_id] = dp
        for device_id, dp in latest_by_device.items():
            triggered = _evaluate_condition(rule, dp.value)
            # Check for existing active event for this rule + device
            active_event_result = await session.execute(
                select(AlarmEvent).where(
                    and_(
                        AlarmEvent.rule_id == rule.id,
                        AlarmEvent.device_id == device_id,
                        AlarmEvent.status.in_(["active", "acknowledged"]),
                    )
                )
            )
            active_event = active_event_result.scalar_one_or_none()
            if triggered and not active_event:
                # Rate limiting: check if a resolved event was created recently
                # to avoid flapping alarms re-opening every cycle.
                recent_result = await session.execute(
                    select(AlarmEvent).where(
                        and_(
                            AlarmEvent.rule_id == rule.id,
                            AlarmEvent.device_id == device_id,
                            AlarmEvent.triggered_at >= now - timedelta(minutes=RATE_LIMIT_MINUTES),
                        )
                    )
                )
                if recent_result.scalar_one_or_none():
                    continue  # Skip, recently triggered
                # Build description
                threshold_str = ""
                if rule.condition == "range_out":
                    threshold_str = f"[{rule.threshold_low}, {rule.threshold_high}]"
                else:
                    threshold_str = str(rule.threshold)
                event = AlarmEvent(
                    rule_id=rule.id,
                    device_id=device_id,
                    severity=rule.severity,
                    title=rule.name,
                    description=f"当前值 {dp.value},阈值 {threshold_str}",
                    value=dp.value,
                    threshold=rule.threshold,
                    status="active",
                    triggered_at=now,
                )
                session.add(event)
                logger.info(
                    f"Alarm triggered: {rule.name} | device={device_id} | "
                    f"value={dp.value} threshold={threshold_str}"
                )
                # Send email notification (non-blocking)
                await _send_alarm_email(rule, event, device_id, session)
            elif not triggered and active_event:
                # Auto-resolve: condition cleared while an event was open.
                active_event.status = "resolved"
                active_event.resolved_at = now
                active_event.resolve_note = "自动恢复"
                logger.info(
                    f"Alarm auto-resolved: {rule.name} | device={device_id}"
                )
    # Push pending inserts/updates to the DB; commit is the caller's job.
    await session.flush()

View File

@@ -0,0 +1,32 @@
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.user import AuditLog
async def log_audit(
    db: AsyncSession,
    user_id: int | None,
    action: str,
    resource: str,
    detail: str = "",
    ip_address: str | None = None,
):
    """Stage a single audit-trail entry on the given session.

    The row is only added via ``db.add``; flushing and committing are
    deliberately left to the caller so the write joins the caller's
    transaction.

    Args:
        db: async database session (must be flushed/committed by caller)
        user_id: ID of the acting user (None for system actions)
        action: one of login, create, update, delete, export, view,
            acknowledge, resolve
        resource: one of user, device, alarm, report, system, auth
        detail: human-readable description
        ip_address: client IP if available
    """
    db.add(
        AuditLog(
            user_id=user_id,
            action=action,
            resource=resource,
            detail=detail,
            ip_address=ip_address,
        )
    )

View File

@@ -0,0 +1,462 @@
"""Carbon Asset Management Service.
Provides carbon accounting, CCER/green certificate management,
report generation, target tracking, and benchmark comparison.
"""
import logging
from datetime import date, datetime, timezone
from typing import Optional
from sqlalchemy import select, func, and_, case
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.carbon import (
CarbonEmission, EmissionFactor, CarbonTarget, CarbonReduction,
GreenCertificate, CarbonReport, CarbonBenchmark,
)
from app.models.energy import EnergyData
logger = logging.getLogger("carbon_asset")
# China national grid average emission factor, 2023 edition (tCO2 per MWh).
GRID_EMISSION_FACTOR = 0.5810
# Average CO2 absorbed by one tree per year (tons); used to express
# reductions as an "equivalent trees" figure in reports and dashboards.
TREE_ABSORPTION = 0.02
# ---------------------------------------------------------------------------
# Carbon Accounting
# ---------------------------------------------------------------------------
async def calculate_scope2_emission(
    db: AsyncSession, start: date, end: date,
) -> float:
    """Sum Scope 2 (purchased grid electricity) emissions in tons CO2.

    CarbonEmission.emission is stored in kg, hence the /1000 conversion.
    The date window is inclusive on both ends.
    """
    stmt = select(func.sum(CarbonEmission.emission)).where(
        and_(
            CarbonEmission.scope == 2,
            func.date(CarbonEmission.date) >= start,
            func.date(CarbonEmission.date) <= end,
        )
    )
    total_kg = (await db.execute(stmt)).scalar() or 0
    return round(total_kg / 1000, 4)
async def calculate_scope1_emission(
    db: AsyncSession, start: date, end: date,
) -> float:
    """Sum Scope 1 direct emissions (natural gas etc.) in tons CO2.

    Stored values are in kg; the inclusive date window mirrors the
    Scope 2 helper.
    """
    stmt = select(func.sum(CarbonEmission.emission)).where(
        and_(
            CarbonEmission.scope == 1,
            func.date(CarbonEmission.date) >= start,
            func.date(CarbonEmission.date) <= end,
        )
    )
    total_kg = (await db.execute(stmt)).scalar() or 0
    return round(total_kg / 1000, 4)
async def calculate_total_reduction(
    db: AsyncSession, start: date, end: date,
) -> float:
    """Sum every recorded carbon reduction in the window, in tons CO2.

    No scope/category filter: all CarbonEmission.reduction values (kg)
    within the inclusive date range are included.
    """
    stmt = select(func.sum(CarbonEmission.reduction)).where(
        and_(
            func.date(CarbonEmission.date) >= start,
            func.date(CarbonEmission.date) <= end,
        )
    )
    reduction_kg = (await db.execute(stmt)).scalar() or 0
    return round(reduction_kg / 1000, 4)
async def calculate_pv_reduction(
    db: AsyncSession, start: date, end: date,
) -> float:
    """Carbon reduction from PV self-consumption, in tons CO2.

    Reads pre-computed reduction values (kg) from records whose category
    is "pv_generation"; those are expected to follow
    reduction = pv_generation_mwh * GRID_EMISSION_FACTOR.
    """
    stmt = select(func.sum(CarbonEmission.reduction)).where(
        and_(
            CarbonEmission.category == "pv_generation",
            func.date(CarbonEmission.date) >= start,
            func.date(CarbonEmission.date) <= end,
        )
    )
    pv_kg = (await db.execute(stmt)).scalar() or 0
    return round(pv_kg / 1000, 4)
# ---------------------------------------------------------------------------
# Reduction tracking
# ---------------------------------------------------------------------------
async def get_reduction_summary(
    db: AsyncSession, start: date, end: date,
) -> list[dict]:
    """Per-source-type reduction totals for an inclusive date range.

    Returns one dict per source_type with summed tons, summed
    equivalent-tree count, and the number of rows aggregated.
    """
    stmt = (
        select(
            CarbonReduction.source_type,
            func.sum(CarbonReduction.reduction_tons),
            func.sum(CarbonReduction.equivalent_trees),
            func.count(CarbonReduction.id),
        )
        .where(
            and_(
                CarbonReduction.date >= start,
                CarbonReduction.date <= end,
            )
        )
        .group_by(CarbonReduction.source_type)
    )
    summary = []
    for source_type, tons, trees, row_count in (await db.execute(stmt)).all():
        summary.append(
            {
                "source_type": source_type,
                "reduction_tons": round(tons or 0, 4),
                "equivalent_trees": round(trees or 0, 1),
                "count": row_count,
            }
        )
    return summary
async def trigger_reduction_calculation(
    db: AsyncSession, start: date, end: date,
) -> dict:
    """Derive reduction records from emission data and persist new ones.

    Splits the period's total reduction into a PV-generation part and a
    heat-pump COP part, then stages one CarbonReduction row per source
    unless a row with the same source and end date already exists.
    Flushing/committing is left to the caller.
    """
    pv_tons = await calculate_pv_reduction(db, start, end)
    overall_tons = await calculate_total_reduction(db, start, end)
    # Anything not attributable to PV is credited to heat-pump savings.
    hp_tons = max(0, overall_tons - pv_tons)
    created = 0
    for source, tons in [("pv_generation", pv_tons), ("heat_pump_cop", hp_tons)]:
        if tons <= 0:
            continue
        dup_check = await db.execute(
            select(CarbonReduction).where(
                and_(
                    CarbonReduction.source_type == source,
                    CarbonReduction.date == end,
                )
            )
        )
        if dup_check.scalar_one_or_none() is not None:
            continue  # already recorded for this period end
        db.add(
            CarbonReduction(
                source_type=source,
                date=end,
                reduction_tons=tons,
                equivalent_trees=round(tons / TREE_ABSORPTION, 1),
                methodology=f"Grid factor {GRID_EMISSION_FACTOR} tCO2/MWh",
                verified=False,
            )
        )
        created += 1
    return {
        "period": f"{start} ~ {end}",
        "pv_reduction_tons": pv_tons,
        "heat_pump_reduction_tons": hp_tons,
        "total_reduction_tons": overall_tons,
        "records_created": created,
    }
# ---------------------------------------------------------------------------
# CCER / Green Certificate Management
# ---------------------------------------------------------------------------
async def calculate_ccer_eligible(
    db: AsyncSession, start: date, end: date,
) -> dict:
    """Eligible CCER volume, taken as the period's PV-generation reduction."""
    eligible_tons = await calculate_pv_reduction(db, start, end)
    return {
        "eligible_ccer_tons": eligible_tons,
        "grid_emission_factor": GRID_EMISSION_FACTOR,
        "period": f"{start} ~ {end}",
    }
async def get_certificate_portfolio_value(db: AsyncSession) -> dict:
    """Aggregate green-certificate holdings by status plus overall totals."""
    grouped = await db.execute(
        select(
            GreenCertificate.status,
            func.count(GreenCertificate.id),
            func.sum(GreenCertificate.energy_mwh),
            func.sum(GreenCertificate.price_yuan),
        ).group_by(GreenCertificate.status)
    )
    by_status: dict = {}
    value_sum = 0.0
    mwh_sum = 0.0
    for status, cert_count, mwh, yuan in grouped.all():
        by_status[status] = {
            "count": cert_count,
            "energy_mwh": round(mwh or 0, 2),
            "value_yuan": round(yuan or 0, 2),
        }
        # NULL sums (no rows for a status) count as zero.
        value_sum += yuan or 0
        mwh_sum += mwh or 0
    return {
        "total_certificates": sum(entry["count"] for entry in by_status.values()),
        "total_energy_mwh": round(mwh_sum, 2),
        "total_value_yuan": round(value_sum, 2),
        "by_status": by_status,
    }
# ---------------------------------------------------------------------------
# Carbon Report Generation
# ---------------------------------------------------------------------------
async def generate_carbon_report(
    db: AsyncSession,
    report_type: str,
    period_start: date,
    period_end: date,
) -> CarbonReport:
    """Build and stage a CarbonReport covering [period_start, period_end].

    The report row is added to the session but not committed; the caller
    controls the transaction.
    """
    scope1_tons = await calculate_scope1_emission(db, period_start, period_end)
    scope2_tons = await calculate_scope2_emission(db, period_start, period_end)
    reduction_tons = await calculate_total_reduction(db, period_start, period_end)
    gross_tons = scope1_tons + scope2_tons
    net_tons = gross_tons - reduction_tons
    payload = {
        "scope_breakdown": {"scope1": scope1_tons, "scope2": scope2_tons},
        "reduction_summary": await get_reduction_summary(db, period_start, period_end),
        "monthly_breakdown": await _monthly_breakdown(db, period_start, period_end),
        "grid_emission_factor": GRID_EMISSION_FACTOR,
        "net_emission_tons": round(net_tons, 4),
        # Share of gross emissions offset by reductions, as a percentage.
        "green_rate": round((reduction_tons / gross_tons * 100) if gross_tons > 0 else 0, 1),
    }
    report = CarbonReport(
        report_type=report_type,
        period_start=period_start,
        period_end=period_end,
        scope1_tons=scope1_tons,
        scope2_tons=scope2_tons,
        total_tons=round(gross_tons, 4),
        reduction_tons=round(reduction_tons, 4),
        net_tons=round(net_tons, 4),
        report_data=payload,
    )
    db.add(report)
    return report
async def _monthly_breakdown(
    db: AsyncSession, start: date, end: date,
) -> list[dict]:
    """Per-month emission/reduction sums (kg and tons) within the period."""
    from app.core.config import get_settings
    settings = get_settings()
    # Month bucketing is dialect-specific: strftime on SQLite,
    # date_trunc + to_char on PostgreSQL.
    if settings.is_sqlite:
        month_col = func.strftime('%Y-%m', CarbonEmission.date).label('month')
    else:
        month_col = func.to_char(
            func.date_trunc('month', CarbonEmission.date), 'YYYY-MM'
        ).label('month')
    stmt = (
        select(
            month_col,
            func.sum(CarbonEmission.emission),
            func.sum(CarbonEmission.reduction),
        )
        .where(
            and_(
                func.date(CarbonEmission.date) >= start,
                func.date(CarbonEmission.date) <= end,
            )
        )
        .group_by('month')
        .order_by('month')
    )
    breakdown = []
    for month, emission_kg, reduction_kg in (await db.execute(stmt)).all():
        breakdown.append(
            {
                "month": month,
                "emission_kg": round(emission_kg or 0, 2),
                "reduction_kg": round(reduction_kg or 0, 2),
                "emission_tons": round((emission_kg or 0) / 1000, 4),
                "reduction_tons": round((reduction_kg or 0) / 1000, 4),
            }
        )
    return breakdown
# ---------------------------------------------------------------------------
# Carbon Target Tracking
# ---------------------------------------------------------------------------
async def get_target_progress(db: AsyncSession, year: int) -> dict:
    """Calculate progress against annual and monthly carbon targets.

    Actuals are net emissions (scope1 + scope2 - reduction). Progress is
    actual / target * 100; status thresholds: <= 80% "on_track",
    <= 100% "warning", otherwise "exceeded".

    Args:
        db: async session.
        year: calendar year to evaluate.

    Returns:
        Dict with year totals, the annual target (or None), and one
        entry per configured monthly target.
    """
    import calendar

    # Annual target row has month IS NULL; monthly rows have month set.
    annual_q = await db.execute(
        select(CarbonTarget).where(and_(
            CarbonTarget.year == year,
            CarbonTarget.month.is_(None),
        ))
    )
    annual = annual_q.scalar_one_or_none()
    monthly_q = await db.execute(
        select(CarbonTarget).where(and_(
            CarbonTarget.year == year,
            CarbonTarget.month.isnot(None),
        )).order_by(CarbonTarget.month)
    )
    monthlies = monthly_q.scalars().all()
    # Current year actuals.
    year_start = date(year, 1, 1)
    year_end = date(year, 12, 31)
    scope1 = await calculate_scope1_emission(db, year_start, year_end)
    scope2 = await calculate_scope2_emission(db, year_start, year_end)
    reduction = await calculate_total_reduction(db, year_start, year_end)
    total_emission = scope1 + scope2
    net = total_emission - reduction
    annual_data = None
    if annual:
        progress = (net / annual.target_emission_tons * 100) if annual.target_emission_tons > 0 else 0
        status = "on_track" if progress <= 80 else ("warning" if progress <= 100 else "exceeded")
        annual_data = {
            "id": annual.id,
            "target_tons": annual.target_emission_tons,
            "actual_tons": round(net, 4),
            "progress_pct": round(progress, 1),
            "status": status,
        }
    monthly_data = []
    for m in monthlies:
        m_start = date(year, m.month, 1)
        # BUGFIX: the emission helpers filter with an inclusive <= end,
        # so the month window must end on the month's LAST day. The old
        # code used the 1st of the next month for months 1-11, which
        # double-counted that day's data in two adjacent months.
        m_end = date(year, m.month, calendar.monthrange(year, m.month)[1])
        s1 = await calculate_scope1_emission(db, m_start, m_end)
        s2 = await calculate_scope2_emission(db, m_start, m_end)
        red = await calculate_total_reduction(db, m_start, m_end)
        m_net = s1 + s2 - red
        pct = (m_net / m.target_emission_tons * 100) if m.target_emission_tons > 0 else 0
        monthly_data.append({
            "id": m.id,
            "month": m.month,
            "target_tons": m.target_emission_tons,
            "actual_tons": round(m_net, 4),
            "progress_pct": round(pct, 1),
            "status": "on_track" if pct <= 80 else ("warning" if pct <= 100 else "exceeded"),
        })
    return {
        "year": year,
        "total_emission_tons": round(total_emission, 4),
        "total_reduction_tons": round(reduction, 4),
        "net_emission_tons": round(net, 4),
        "annual_target": annual_data,
        "monthly_targets": monthly_data,
    }
# ---------------------------------------------------------------------------
# Benchmark Comparison
# ---------------------------------------------------------------------------
async def compare_with_benchmarks(
    db: AsyncSession, year: int,
) -> dict:
    """Report the year's actual emissions alongside stored industry benchmarks."""
    bench_rows = await db.execute(
        select(CarbonBenchmark).where(CarbonBenchmark.year == year)
    )
    benchmarks = bench_rows.scalars().all()
    first_day = date(year, 1, 1)
    last_day = date(year, 12, 31)
    scope1 = await calculate_scope1_emission(db, first_day, last_day)
    scope2 = await calculate_scope2_emission(db, first_day, last_day)
    gross = scope1 + scope2
    reduction = await calculate_total_reduction(db, first_day, last_day)
    comparisons = [
        {
            "industry": b.industry,
            "metric": b.metric_name,
            "benchmark_value": b.benchmark_value,
            "unit": b.unit,
            "source": b.source,
        }
        for b in benchmarks
    ]
    return {
        "year": year,
        "actual_emission_tons": round(gross, 4),
        "actual_reduction_tons": round(reduction, 4),
        "net_tons": round(gross - reduction, 4),
        "benchmarks": comparisons,
    }
# ---------------------------------------------------------------------------
# Dashboard aggregation
# ---------------------------------------------------------------------------
async def get_carbon_dashboard(db: AsyncSession) -> dict:
    """Assemble year-to-date KPI, trend, and portfolio data for the dashboard."""
    now = datetime.now(timezone.utc)
    year = now.year
    ytd_start = date(year, 1, 1)
    today = now.date()
    scope1 = await calculate_scope1_emission(db, ytd_start, today)
    scope2 = await calculate_scope2_emission(db, ytd_start, today)
    emission_total = scope1 + scope2
    reduction = await calculate_total_reduction(db, ytd_start, today)
    net = emission_total - reduction
    green_rate = round((reduction / emission_total * 100) if emission_total > 0 else 0, 1)
    target_progress = await get_target_progress(db, year)
    monthly_trend = await _monthly_breakdown(db, ytd_start, today)
    reduction_by_source = await get_reduction_summary(db, ytd_start, today)
    portfolio = await get_certificate_portfolio_value(db)
    kpi = {
        "total_emission_tons": round(emission_total, 4),
        "total_reduction_tons": round(reduction, 4),
        "net_emission_tons": round(net, 4),
        "green_rate": green_rate,
        "scope1_tons": scope1,
        "scope2_tons": scope2,
        "equivalent_trees": round(reduction / TREE_ABSORPTION, 0) if reduction > 0 else 0,
    }
    return {
        "kpi": kpi,
        "target_progress": target_progress.get("annual_target"),
        "monthly_trend": monthly_trend,
        "reduction_by_source": reduction_by_source,
        "certificate_portfolio": portfolio,
    }

View File

@@ -0,0 +1,261 @@
from datetime import datetime, timedelta, timezone
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func, and_
from app.models.pricing import ElectricityPricing, PricingPeriod
from app.models.energy import EnergyData, EnergyDailySummary
from app.models.device import Device
async def get_active_pricing(db: AsyncSession, energy_type: str = "electricity", date: datetime | None = None):
    """Return the newest active pricing plan for an energy type.

    Args:
        db: async session.
        energy_type: energy category, defaults to "electricity".
        date: if given, the plan must be effective on this date; a NULL
            effective_from/effective_to bound means open-ended.
            (Parameter name shadows datetime.date; kept for callers.)

    Returns:
        The matching ElectricityPricing row, or None.
    """
    q = select(ElectricityPricing).where(
        and_(
            ElectricityPricing.energy_type == energy_type,
            # .is_(True) is the SQLAlchemy-idiomatic boolean column test
            # (replaces the linter-flagged `== True`, E712).
            ElectricityPricing.is_active.is_(True),
        )
    )
    if date:
        q = q.where(
            and_(
                # .is_(None) replaces the non-idiomatic `== None` (E711);
                # both render as IS NULL in SQL.
                ElectricityPricing.effective_from.is_(None)
                | (ElectricityPricing.effective_from <= date),
                ElectricityPricing.effective_to.is_(None)
                | (ElectricityPricing.effective_to >= date),
            )
        )
    q = q.order_by(ElectricityPricing.created_at.desc()).limit(1)
    result = await db.execute(q)
    return result.scalar_one_or_none()
async def get_pricing_periods(db: AsyncSession, pricing_id: int, month: int | None = None):
    """Fetch the periods of a pricing plan, optionally filtered by month.

    A period whose applicable_months is None applies to every month.
    """
    rows = await db.execute(
        select(PricingPeriod).where(PricingPeriod.pricing_id == pricing_id)
    )
    all_periods = rows.scalars().all()
    if month is None:
        return all_periods
    return [
        p for p in all_periods
        if p.applicable_months is None or month in p.applicable_months
    ]
def get_period_for_hour(periods: list, hour: int) -> PricingPeriod | None:
    """Find the pricing period that covers the given hour of day.

    Windows are half-open [start, end); a window whose start is after
    its end wraps past midnight (e.g. 23:00-07:00). Falls back to the
    first period when nothing matches, and None for an empty list.
    """
    stamp = f"{hour:02d}:00"
    for candidate in periods:
        begin, finish = candidate.start_time, candidate.end_time
        if begin <= finish:
            matched = begin <= stamp < finish
        else:
            # Window wraps midnight.
            matched = stamp >= begin or stamp < finish
        if matched:
            return candidate
    return periods[0] if periods else None
async def calculate_daily_cost(db: AsyncSession, date: datetime, device_id: int | None = None):
    """Compute the electricity cost for one calendar day.

    Flat plans multiply the day's summed consumption by the single unit
    price; time-of-use plans sum per-hour consumption times the matching
    period price. The computed cost is also written onto the day's
    EnergyDailySummary rows (flush/commit left to the caller).

    Args:
        db: async session.
        date: any datetime within the target day.
        device_id: optional device filter; None aggregates all devices.

    Returns:
        The day's cost rounded to 2 decimals (0.0 when no pricing is active).
    """
    pricing = await get_active_pricing(db, "electricity", date)
    if not pricing:
        return 0.0
    if pricing.pricing_type == "flat":
        # Flat tariff: the daily summary total is enough.
        q = select(func.sum(EnergyDailySummary.total_consumption)).where(
            and_(
                EnergyDailySummary.date >= date.replace(hour=0, minute=0, second=0),
                EnergyDailySummary.date < date.replace(hour=0, minute=0, second=0) + timedelta(days=1),
                EnergyDailySummary.energy_type == "electricity",
            )
        )
        if device_id:
            q = q.where(EnergyDailySummary.device_id == device_id)
        result = await db.execute(q)
        total_energy = result.scalar() or 0.0
        periods = await get_pricing_periods(db, pricing.id)
        # A flat plan is expected to carry its price on a single period.
        flat_price = periods[0].price_per_unit if periods else 0.0
        cost = total_energy * flat_price
    else:
        # Time-of-use tariff: price each hour by its matching period.
        periods = await get_pricing_periods(db, pricing.id, month=date.month)
        if not periods:
            return 0.0
        cost = 0.0
        day_start = date.replace(hour=0, minute=0, second=0, microsecond=0)
        for hour in range(24):
            hour_start = day_start + timedelta(hours=hour)
            hour_end = hour_start + timedelta(hours=1)
            q = select(func.sum(EnergyData.value)).where(
                and_(
                    EnergyData.timestamp >= hour_start,
                    EnergyData.timestamp < hour_end,
                    EnergyData.data_type == "energy",
                )
            )
            if device_id:
                q = q.where(EnergyData.device_id == device_id)
            result = await db.execute(q)
            hour_energy = result.scalar() or 0.0
            period = get_period_for_hour(periods, hour)
            if period:
                cost += hour_energy * period.price_per_unit
    # Persist the computed cost onto the matching daily summary rows.
    q = select(EnergyDailySummary).where(
        and_(
            EnergyDailySummary.date >= date.replace(hour=0, minute=0, second=0),
            EnergyDailySummary.date < date.replace(hour=0, minute=0, second=0) + timedelta(days=1),
            EnergyDailySummary.energy_type == "electricity",
        )
    )
    if device_id:
        q = q.where(EnergyDailySummary.device_id == device_id)
    result = await db.execute(q)
    for summary in result.scalars().all():
        summary.cost = cost
    return round(cost, 2)
async def get_cost_summary(
    db: AsyncSession, start_date: datetime, end_date: datetime,
    group_by: str = "day", energy_type: str = "electricity",
):
    """Summarize consumption and cost from daily summaries.

    Args:
        db: async session.
        start_date: inclusive range start.
        end_date: inclusive range end.
        group_by: "day" (default), "device", or "month".
        energy_type: energy category filter.

    Returns:
        A list of dicts keyed by the grouping dimension, each with
        summed consumption and cost rounded to 2 decimals.
    """
    # Base per-day query; only the "day" branch below actually uses it.
    q = select(
        EnergyDailySummary.date,
        func.sum(EnergyDailySummary.total_consumption).label("consumption"),
        func.sum(EnergyDailySummary.cost).label("cost"),
    ).where(
        and_(
            EnergyDailySummary.date >= start_date,
            EnergyDailySummary.date <= end_date,
            EnergyDailySummary.energy_type == energy_type,
        )
    )
    if group_by == "device":
        # Outer join keeps summaries whose device row is missing; those
        # fall back to a synthesized "Device#<id>" display name.
        q = select(
            EnergyDailySummary.device_id,
            Device.name.label("device_name"),
            func.sum(EnergyDailySummary.total_consumption).label("consumption"),
            func.sum(EnergyDailySummary.cost).label("cost"),
        ).join(Device, EnergyDailySummary.device_id == Device.id, isouter=True).where(
            and_(
                EnergyDailySummary.date >= start_date,
                EnergyDailySummary.date <= end_date,
                EnergyDailySummary.energy_type == energy_type,
            )
        ).group_by(EnergyDailySummary.device_id, Device.name)
        result = await db.execute(q)
        return [
            {"device_id": r[0], "device_name": r[1] or f"Device#{r[0]}",
             "consumption": round(r[2] or 0, 2), "cost": round(r[3] or 0, 2)}
            for r in result.all()
        ]
    elif group_by == "month":
        from app.core.config import get_settings
        settings = get_settings()
        # Month bucketing is dialect-specific: strftime on SQLite,
        # to_char on PostgreSQL.
        if settings.is_sqlite:
            group_expr = func.strftime('%Y-%m', EnergyDailySummary.date).label('period')
        else:
            group_expr = func.to_char(EnergyDailySummary.date, 'YYYY-MM').label('period')
        q = select(
            group_expr,
            func.sum(EnergyDailySummary.total_consumption).label("consumption"),
            func.sum(EnergyDailySummary.cost).label("cost"),
        ).where(
            and_(
                EnergyDailySummary.date >= start_date,
                EnergyDailySummary.date <= end_date,
                EnergyDailySummary.energy_type == energy_type,
            )
        ).group_by(group_expr).order_by(group_expr)
        result = await db.execute(q)
        return [
            {"period": str(r[0]), "consumption": round(r[1] or 0, 2), "cost": round(r[2] or 0, 2)}
            for r in result.all()
        ]
    else:  # day
        q = q.group_by(EnergyDailySummary.date).order_by(EnergyDailySummary.date)
        result = await db.execute(q)
        return [
            {"date": str(r[0]), "consumption": round(r[1] or 0, 2), "cost": round(r[2] or 0, 2)}
            for r in result.all()
        ]
async def get_cost_breakdown(db: AsyncSession, start_date: datetime, end_date: datetime, energy_type: str = "electricity"):
    """Approximate peak/flat/valley cost distribution for a date range.

    Consumption per period is estimated by apportioning the range's
    total daily-summary consumption by each period's share of the
    24-hour day (per-hour data is not queried here).

    Args:
        db: async session.
        start_date: inclusive range start.
        end_date: inclusive range end.
        energy_type: energy category filter.

    Returns:
        Dict with a per-period breakdown plus totals; empty breakdown
        when no active pricing or no periods are configured.
    """
    pricing = await get_active_pricing(db, energy_type, start_date)
    if not pricing:
        return {"periods": [], "total_cost": 0, "total_consumption": 0}
    periods = await get_pricing_periods(db, pricing.id)
    if not periods:
        return {"periods": [], "total_cost": 0, "total_consumption": 0}
    # The total consumption over the range does not depend on the pricing
    # period, so query it ONCE (the old code re-ran this identical query
    # inside the per-period loop and also left unused `current`/`end`
    # locals behind).
    q = select(func.sum(EnergyDailySummary.total_consumption)).where(
        and_(
            EnergyDailySummary.date >= start_date,
            EnergyDailySummary.date <= end_date,
            EnergyDailySummary.energy_type == energy_type,
        )
    )
    result = await db.execute(q)
    total_daily = result.scalar() or 0.0
    # Chinese display labels for common period names (loop-invariant).
    period_name_map = {
        "peak": "尖峰", "sharp": "尖峰",
        "high": "高峰", "shoulder": "高峰",
        "flat": "平段",
        "valley": "低谷", "off_peak": "低谷",
    }
    breakdown = []
    total_cost = 0.0
    total_consumption = 0.0
    for period in periods:
        start_hour = int(period.start_time.split(":")[0])
        end_hour = int(period.end_time.split(":")[0])
        if start_hour < end_hour:
            hours = list(range(start_hour, end_hour))
        else:  # crosses midnight
            hours = list(range(start_hour, 24)) + list(range(0, end_hour))
        # Approximate the period's share as its hour count over 24h.
        proportion = len(hours) / 24.0
        period_energy = total_daily * proportion
        period_cost = period_energy * period.price_per_unit
        total_cost += period_cost
        total_consumption += period_energy
        breakdown.append({
            "period_name": period.period_name,
            "period_label": period_name_map.get(period.period_name, period.period_name),
            "start_time": period.start_time,
            "end_time": period.end_time,
            "price_per_unit": period.price_per_unit,
            "consumption": round(period_energy, 2),
            "cost": round(period_cost, 2),
            "proportion": round(proportion * 100, 1),
        })
    return {
        "periods": breakdown,
        "total_cost": round(total_cost, 2),
        "total_consumption": round(total_consumption, 2),
        "pricing_name": pricing.name,
        "pricing_type": pricing.pricing_type,
    }

View File

@@ -0,0 +1,105 @@
"""邮件发送服务 - SMTP email sending for alarm notifications and report delivery."""
import logging
import smtplib
import ssl
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email import encoders
from pathlib import Path
from typing import Optional
from app.core.config import get_settings
logger = logging.getLogger("email_service")
async def send_email(
    to: list[str],
    subject: str,
    body_html: str,
    attachments: Optional[list[str]] = None,
) -> bool:
    """
    Send an email via SMTP.

    The blocking smtplib exchange is offloaded to a worker thread via
    asyncio.to_thread so the event loop is not stalled — the previous
    implementation performed the whole SMTP conversation synchronously
    inside this coroutine, blocking every other task for its duration.

    Args:
        to: List of recipient email addresses.
        subject: Email subject line.
        body_html: HTML body content.
        attachments: Optional list of file paths to attach.
    Returns:
        True if sent successfully, False otherwise.
    """
    import asyncio

    settings = get_settings()
    if not settings.SMTP_ENABLED:
        logger.warning("SMTP is not enabled (SMTP_ENABLED=False). Skipping email send.")
        return False
    if not settings.SMTP_HOST:
        logger.warning("SMTP_HOST is not configured. Skipping email send.")
        return False
    if not to:
        logger.warning("No recipients specified. Skipping email send.")
        return False
    try:
        msg = MIMEMultipart("mixed")
        msg["From"] = settings.SMTP_FROM
        msg["To"] = ", ".join(to)
        msg["Subject"] = subject
        # HTML body
        msg.attach(MIMEText(body_html, "html", "utf-8"))
        # Attachments: missing files are skipped with a warning rather
        # than failing the whole send.
        if attachments:
            for filepath in attachments:
                path = Path(filepath)
                if not path.exists():
                    logger.warning(f"Attachment not found, skipping: {filepath}")
                    continue
                with open(path, "rb") as f:
                    part = MIMEBase("application", "octet-stream")
                    part.set_payload(f.read())
                encoders.encode_base64(part)
                part.add_header(
                    "Content-Disposition",
                    f'attachment; filename="{path.name}"',
                )
                msg.attach(part)

        def _deliver() -> None:
            # Runs in a worker thread — smtplib is blocking by design.
            context = ssl.create_default_context()
            if settings.SMTP_PORT == 465:
                # Implicit-SSL connection.
                with smtplib.SMTP_SSL(settings.SMTP_HOST, settings.SMTP_PORT, context=context) as server:
                    if settings.SMTP_USER and settings.SMTP_PASSWORD:
                        server.login(settings.SMTP_USER, settings.SMTP_PASSWORD)
                    server.sendmail(settings.SMTP_FROM, to, msg.as_string())
            else:
                # STARTTLS connection (port 587) or plain (port 25).
                with smtplib.SMTP(settings.SMTP_HOST, settings.SMTP_PORT) as server:
                    server.ehlo()
                    if settings.SMTP_PORT == 587:
                        server.starttls(context=context)
                        server.ehlo()
                    if settings.SMTP_USER and settings.SMTP_PASSWORD:
                        server.login(settings.SMTP_USER, settings.SMTP_PASSWORD)
                    server.sendmail(settings.SMTP_FROM, to, msg.as_string())

        await asyncio.to_thread(_deliver)
        logger.info(f"Email sent successfully to {to}, subject: {subject}")
        return True
    except smtplib.SMTPException as e:
        logger.error(f"SMTP error sending email to {to}: {e}")
        return False
    except Exception as e:
        logger.error(f"Unexpected error sending email to {to}: {e}")
        return False

View File

@@ -0,0 +1,419 @@
"""能源策略优化服务 - 峰谷电价策略、谷电蓄热、负荷转移、光伏自消纳"""
from datetime import datetime, date, timedelta, timezone
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func, and_
from app.models.energy_strategy import (
TouPricing, TouPricingPeriod, EnergyStrategy, StrategyExecution, MonthlyCostReport,
)
from app.models.energy import EnergyData, EnergyDailySummary
# Beijing timezone (UTC+8); strategy calculations build datetimes in it.
BJT = timezone(timedelta(hours=8))
# Default Beijing industrial TOU price table (yuan/kWh), used as a
# reference when no plan is configured; windows are [start_time, end_time).
DEFAULT_PERIODS = [
    {"period_type": "sharp_peak", "start_time": "10:00", "end_time": "15:00", "price": 1.3761},
    {"period_type": "sharp_peak", "start_time": "18:00", "end_time": "21:00", "price": 1.3761},
    {"period_type": "peak", "start_time": "08:00", "end_time": "10:00", "price": 1.1883},
    {"period_type": "peak", "start_time": "15:00", "end_time": "18:00", "price": 1.1883},
    {"period_type": "peak", "start_time": "21:00", "end_time": "23:00", "price": 1.1883},
    {"period_type": "flat", "start_time": "07:00", "end_time": "08:00", "price": 0.7467},
    {"period_type": "valley", "start_time": "23:00", "end_time": "07:00", "price": 0.3048},
]
# Chinese display labels for each TOU period type.
PERIOD_LABELS = {
    "sharp_peak": "尖峰",
    "peak": "高峰",
    "flat": "平段",
    "valley": "低谷",
}
def parse_month_range(month_range: str | None) -> list[int] | None:
"""Parse month range string like '1-3,11-12' into list of month ints."""
if not month_range:
return None
months = []
for part in month_range.split(","):
part = part.strip()
if "-" in part:
start, end = part.split("-")
months.extend(range(int(start), int(end) + 1))
else:
months.append(int(part))
return months
def get_period_for_hour(periods: list[TouPricingPeriod], hour: int, month: int | None = None) -> TouPricingPeriod | None:
    """Pick the TOU period covering *hour*, honoring per-period month ranges.

    Periods whose month_range excludes *month* are skipped. Windows are
    half-open [start, end) and may wrap midnight. Falls back to the
    first period when no window matches (None for an empty list).
    """
    stamp = f"{hour:02d}:00"
    for entry in periods:
        if month is not None and entry.month_range:
            allowed = parse_month_range(entry.month_range)
            if allowed and month not in allowed:
                continue
        lo, hi = entry.start_time, entry.end_time
        if lo <= hi:
            hit = lo <= stamp < hi
        else:
            # Window wraps midnight, e.g. 23:00-07:00.
            hit = stamp >= lo or stamp < hi
        if hit:
            return entry
    return periods[0] if periods else None
async def get_active_tou_pricing(db: AsyncSession, target_date: date | None = None) -> TouPricing | None:
    """Return the newest active TOU pricing plan.

    Args:
        db: async session.
        target_date: if given, the plan must be effective on this date;
            NULL effective_date/end_date bounds are open-ended.

    Returns:
        The matching TouPricing row, or None.
    """
    # .is_(True)/.is_(None) are the SQLAlchemy-idiomatic column tests;
    # the previous `== True` / `== None` comparisons trip E712/E711.
    q = select(TouPricing).where(TouPricing.is_active.is_(True))
    if target_date:
        q = q.where(
            and_(
                TouPricing.effective_date.is_(None) | (TouPricing.effective_date <= target_date),
                TouPricing.end_date.is_(None) | (TouPricing.end_date >= target_date),
            )
        )
    q = q.order_by(TouPricing.created_at.desc()).limit(1)
    result = await db.execute(q)
    return result.scalar_one_or_none()
async def get_tou_periods(db: AsyncSession, pricing_id: int) -> list[TouPricingPeriod]:
    """Load every pricing period belonging to one TOU plan."""
    stmt = select(TouPricingPeriod).where(TouPricingPeriod.pricing_id == pricing_id)
    rows = await db.execute(stmt)
    return list(rows.scalars().all())
async def calculate_hourly_cost(
    db: AsyncSession, target_date: date, periods: list[TouPricingPeriod],
) -> dict:
    """Per-hour consumption, price, and cost for one day (Beijing time).

    Hours without a matching TOU period fall back to the flat price
    0.7467 and period type "flat".
    """
    midnight = datetime(target_date.year, target_date.month, target_date.day, tzinfo=BJT)
    rows = []
    cost_sum = 0.0
    kwh_sum = 0.0
    for hour in range(24):
        window_start = midnight + timedelta(hours=hour)
        window_end = window_start + timedelta(hours=1)
        stmt = select(func.sum(EnergyData.value)).where(
            and_(
                EnergyData.timestamp >= window_start,
                EnergyData.timestamp < window_end,
                EnergyData.data_type == "energy",
            )
        )
        kwh = (await db.execute(stmt)).scalar() or 0.0
        matched = get_period_for_hour(periods, hour, target_date.month)
        unit_price = matched.price_yuan_per_kwh if matched else 0.7467
        kind = matched.period_type if matched else "flat"
        hour_cost = kwh * unit_price
        cost_sum += hour_cost
        kwh_sum += kwh
        rows.append({
            "hour": hour,
            "consumption_kwh": round(kwh, 2),
            "price": unit_price,
            "cost": round(hour_cost, 2),
            "period_type": kind,
            "period_label": PERIOD_LABELS.get(kind, kind),
        })
    return {
        "date": str(target_date),
        "hourly": rows,
        "total_cost": round(cost_sum, 2),
        "total_kwh": round(kwh_sum, 2),
    }
async def calculate_monthly_cost_breakdown(
    db: AsyncSession, year: int, month: int,
) -> dict:
    """Calculate monthly cost breakdown by TOU period type.

    The month's total consumption (from daily summaries) is apportioned
    to each period type by its share of hours in a 24-hour day, then
    priced at that type's rate — an approximation, since actual per-hour
    consumption is not queried here.

    Args:
        db: async session.
        year: calendar year.
        month: calendar month (1-12).

    Returns:
        Dict with totals and one breakdown entry per period type that
        covers at least one hour; a zeroed payload when no TOU pricing
        or periods are configured.
    """
    pricing = await get_active_tou_pricing(db, date(year, month, 1))
    if not pricing:
        return _empty_cost_breakdown(year, month)
    periods = await get_tou_periods(db, pricing.id)
    if not periods:
        return _empty_cost_breakdown(year, month)
    # Build hour -> period mapping: count how many of the day's 24 hours
    # each period type covers for this month.
    period_stats = {pt: {"kwh": 0.0, "cost": 0.0, "hours": 0}
                    for pt in ["sharp_peak", "peak", "flat", "valley"]}
    for hour in range(24):
        period = get_period_for_hour(periods, hour, month)
        if not period:
            continue
        pt = period.period_type
        # Tolerate period types outside the four standard ones.
        if pt not in period_stats:
            period_stats[pt] = {"kwh": 0.0, "cost": 0.0, "hours": 0}
        period_stats[pt]["hours"] += 1
    # Get daily summaries for the month (half-open [month_start, month_end)).
    month_start = date(year, month, 1)
    if month == 12:
        month_end = date(year + 1, 1, 1)
    else:
        month_end = date(year, month + 1, 1)
    q = select(
        func.sum(EnergyDailySummary.total_consumption),
    ).where(
        and_(
            EnergyDailySummary.date >= datetime(month_start.year, month_start.month, month_start.day),
            EnergyDailySummary.date < datetime(month_end.year, month_end.month, month_end.day),
            EnergyDailySummary.energy_type == "electricity",
        )
    )
    result = await db.execute(q)
    total_monthly_kwh = result.scalar() or 0.0
    # Distribute consumption by each type's hour proportion, then price
    # it at the first configured period of that type.
    total_hours = sum(ps["hours"] for ps in period_stats.values())
    for pt, ps in period_stats.items():
        proportion = ps["hours"] / total_hours if total_hours > 0 else 0
        ps["kwh"] = total_monthly_kwh * proportion
        period_obj = next((p for p in periods if p.period_type == pt), None)
        price = period_obj.price_yuan_per_kwh if period_obj else 0
        ps["cost"] = ps["kwh"] * price
    total_cost = sum(ps["cost"] for ps in period_stats.values())
    breakdown = []
    for pt, ps in period_stats.items():
        # Period types covering zero hours are omitted from the output.
        if ps["hours"] == 0:
            continue
        breakdown.append({
            "period_type": pt,
            "period_label": PERIOD_LABELS.get(pt, pt),
            "consumption_kwh": round(ps["kwh"], 2),
            "cost_yuan": round(ps["cost"], 2),
            "hours_per_day": ps["hours"],
            "proportion": round(ps["kwh"] / total_monthly_kwh * 100, 1) if total_monthly_kwh > 0 else 0,
        })
    return {
        "year_month": f"{year}-{month:02d}",
        "total_consumption_kwh": round(total_monthly_kwh, 2),
        "total_cost_yuan": round(total_cost, 2),
        "breakdown": breakdown,
        "pricing_name": pricing.name,
    }
def _empty_cost_breakdown(year: int, month: int) -> dict:
return {
"year_month": f"{year}-{month:02d}",
"total_consumption_kwh": 0,
"total_cost_yuan": 0,
"breakdown": [],
"pricing_name": "未配置",
}
def calculate_heat_storage_savings(
    daily_kwh: float, periods: list[TouPricingPeriod], shift_ratio: float = 0.3,
) -> dict:
    """Savings from pre-heating storage on valley power (谷电蓄热 strategy).

    Assumes *shift_ratio* of the daily load moves from sharp-peak/peak
    hours to the valley window; each shifted kWh saves the average peak
    price minus the valley price.
    """
    # Collect peak-tier prices and the valley price from the plan; the
    # defaults below cover plans missing those tiers.
    peak_tier_prices: list[float] = []
    valley_rate = 0.3048
    for entry in periods:
        if entry.period_type in ("sharp_peak", "peak"):
            peak_tier_prices.append(entry.price_yuan_per_kwh)
        elif entry.period_type == "valley":
            valley_rate = entry.price_yuan_per_kwh
    mean_peak = sum(peak_tier_prices) / len(peak_tier_prices) if peak_tier_prices else 1.2
    moved_kwh = daily_kwh * shift_ratio
    daily_saving = moved_kwh * (mean_peak - valley_rate)
    return {
        "shifted_kwh": round(moved_kwh, 2),
        "avg_peak_price": round(mean_peak, 4),
        "valley_price": round(valley_rate, 4),
        "savings_per_day": round(daily_saving, 2),
        "savings_per_month": round(daily_saving * 30, 2),
        "savings_per_year": round(daily_saving * 365, 2),
        "strategy": "谷电蓄热",
        "description": f"{shift_ratio*100:.0f}%的热泵负荷从尖峰/高峰时段转移至低谷时段(23:00-7:00)预热水箱",
    }
def calculate_pv_priority_savings(
    pv_daily_kwh: float, grid_price: float = 0.7467, feed_in_price: float = 0.3548,
) -> dict:
    """Calculate savings from PV self-consumption priority strategy.

    Compares consuming PV output on-site (valued at the grid purchase price)
    against exporting it (valued at the feed-in tariff).
    """
    self_consume_value = pv_daily_kwh * grid_price
    feed_in_value = pv_daily_kwh * feed_in_price
    daily = self_consume_value - feed_in_value
    monthly = daily * 30
    return {
        "pv_daily_kwh": round(pv_daily_kwh, 2),
        "self_consume_value": round(self_consume_value, 2),
        "feed_in_value": round(feed_in_value, 2),
        "savings_per_day": round(daily, 2),
        "savings_per_month": round(monthly, 2),
        "strategy": "光伏自消纳优先",
        "description": "优先使用光伏发电供给园区负荷,减少向电网购电",
    }
def _period_span_hours(period) -> int:
    """Whole hours covered by a TOU period, handling spans that wrap midnight."""
    start_h = int(period.start_time.split(":")[0])
    end_h = int(period.end_time.split(":")[0])
    return end_h - start_h if start_h < end_h else (24 - start_h) + end_h


def simulate_strategy_impact(
    daily_consumption_kwh: float,
    pv_daily_kwh: float,
    periods: list[TouPricingPeriod],
    strategies: list[str],
) -> dict:
    """Simulate impact of enabling various strategies.

    Args:
        daily_consumption_kwh: total park consumption per day.
        pv_daily_kwh: daily PV generation (used by the "pv_priority" strategy).
        periods: configured TOU pricing periods.
        strategies: subset of {"heat_storage", "pv_priority", "load_shift"}.

    Returns:
        Dict with baseline vs optimized daily cost, projected savings over
        day/month/year, and per-strategy detail entries.
    """
    # Compute each period's hour span once (this was previously duplicated
    # across two loops with identical logic).
    span_hours = [(p, _period_span_hours(p)) for p in periods]
    total_hours = sum(h for _, h in span_hours) or 24
    # Baseline cost: spread daily consumption across periods proportionally
    # to the number of hours each period covers.
    baseline_cost = 0.0
    for p, hours in span_hours:
        kwh = daily_consumption_kwh * (hours / total_hours)
        baseline_cost += kwh * p.price_yuan_per_kwh
    optimized_cost = baseline_cost
    savings_details = []
    if "heat_storage" in strategies:
        # Heat pumps are assumed to be ~40% of total load; 30% of that shiftable.
        hs = calculate_heat_storage_savings(daily_consumption_kwh * 0.4, periods, 0.3)
        optimized_cost -= hs["savings_per_day"]
        savings_details.append(hs)
    if "pv_priority" in strategies:
        pv = calculate_pv_priority_savings(pv_daily_kwh)
        optimized_cost -= pv["savings_per_day"]
        savings_details.append(pv)
    if "load_shift" in strategies:
        # Shift 15% of peak load to flat/valley
        valley_p = next((p for p in periods if p.period_type == "valley"), None)
        peak_p = next((p for p in periods if p.period_type == "sharp_peak"), None)
        if valley_p and peak_p:
            shift_kwh = daily_consumption_kwh * 0.15
            saved = shift_kwh * (peak_p.price_yuan_per_kwh - valley_p.price_yuan_per_kwh)
            optimized_cost -= saved
            savings_details.append({
                "strategy": "负荷转移",
                "savings_per_day": round(saved, 2),
                "savings_per_month": round(saved * 30, 2),
                "description": "将15%的尖峰时段负荷转移至低谷时段",
            })
    # Clamp at zero: strategies can never make the daily cost negative.
    final_cost = max(0, optimized_cost)
    return {
        "baseline_cost_per_day": round(baseline_cost, 2),
        "optimized_cost_per_day": round(final_cost, 2),
        "total_savings_per_day": round(baseline_cost - final_cost, 2),
        "total_savings_per_month": round((baseline_cost - final_cost) * 30, 2),
        "total_savings_per_year": round((baseline_cost - final_cost) * 365, 2),
        "savings_percentage": round((1 - final_cost / baseline_cost) * 100, 1) if baseline_cost > 0 else 0,
        "details": savings_details,
    }
async def get_recommendations(db: AsyncSession) -> list[dict]:
    """Generate current strategy recommendations based on data.

    Recommendations are driven by the Beijing-local calendar month (heating
    season check) and hour of day (TOU peak windows). The db handle is
    currently unused but kept for future data-driven checks.
    """
    now = datetime.now(BJT)
    recs: list[dict] = []
    # Valley heat storage only applies during the heating season (Nov-Mar).
    if now.month in (11, 12, 1, 2, 3):
        recs.append({
            "type": "heat_storage",
            "title": "谷电蓄热策略",
            "description": "当前为采暖季,建议在低谷时段(23:00-7:00)预热水箱,减少尖峰时段热泵运行",
            "priority": "high",
            "estimated_savings": "每月可节约约3000-5000元",
        })
    # PV self-consumption is always recommended.
    recs.append({
        "type": "pv_priority",
        "title": "光伏自消纳优先",
        "description": "优先使用屋顶光伏发电满足园区负荷,减少购电成本",
        "priority": "medium",
        "estimated_savings": "每月可节约约1500-2500元",
    })
    # Load-shift advice only while inside a peak-price window.
    if 10 <= now.hour <= 15 or 18 <= now.hour <= 21:
        recs.append({
            "type": "load_shift",
            "title": "当前处于尖峰时段",
            "description": "建议减少非必要大功率设备运行,可延迟至低谷时段执行",
            "priority": "high",
            "estimated_savings": "尖峰电价1.3761元/kWh,低谷电价0.3048元/kWh",
        })
    return recs
async def get_savings_report(db: AsyncSession, year: int) -> dict:
    """Generate yearly savings report.

    Aggregates stored MonthlyCostReport rows for ``year`` into a per-month
    list plus yearly totals. NULL cost columns are coalesced to 0 during
    accumulation so a single incomplete month cannot crash the yearly totals.
    """
    result = await db.execute(
        select(MonthlyCostReport).where(
            MonthlyCostReport.year_month.like(f"{year}-%")
        ).order_by(MonthlyCostReport.year_month)
    )
    monthly_reports = result.scalars().all()
    reports = []
    total_savings = 0.0
    total_baseline = 0.0
    total_optimized = 0.0
    for r in monthly_reports:
        reports.append({
            "year_month": r.year_month,
            "total_consumption_kwh": r.total_consumption_kwh,
            "total_cost_yuan": r.total_cost_yuan,
            "baseline_cost": r.baseline_cost,
            "optimized_cost": r.optimized_cost,
            "savings_yuan": r.savings_yuan,
        })
        # Coalesce NULLs to 0 so months with missing cost data don't raise TypeError.
        total_savings += r.savings_yuan or 0
        total_baseline += r.baseline_cost or 0
        total_optimized += r.optimized_cost or 0
    return {
        "year": year,
        "monthly_reports": reports,
        "total_savings_yuan": round(total_savings, 2),
        "total_baseline_cost": round(total_baseline, 2),
        "total_optimized_cost": round(total_optimized, 2),
        "savings_percentage": round(total_savings / total_baseline * 100, 1) if total_baseline > 0 else 0,
    }

View File

@@ -0,0 +1,124 @@
"""配额检测服务 - 计算配额使用率,超限时生成告警事件"""
import logging
from datetime import datetime, timezone, timedelta
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.quota import EnergyQuota, QuotaUsage
from app.models.alarm import AlarmEvent
from app.models.energy import EnergyDailySummary
logger = logging.getLogger("quota_checker")
def _get_period_range(period: str, now: datetime) -> tuple[datetime, datetime]:
"""根据配额周期计算当前统计区间"""
if period == "monthly":
start = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
# 下月1号
if now.month == 12:
end = start.replace(year=now.year + 1, month=1)
else:
end = start.replace(month=now.month + 1)
else: # yearly
start = now.replace(month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
end = start.replace(year=now.year + 1)
return start, end
async def check_quotas(session: AsyncSession):
    """Main quota-check loop: compute each active quota's usage rate and upsert records.

    For every active EnergyQuota: sums matching EnergyDailySummary consumption
    over the current period window, classifies usage as normal/warning/exceeded
    against the quota's percentage thresholds, upserts a QuotaUsage row for the
    window, and creates an AlarmEvent when a threshold is crossed (deduplicated
    against still-open alarms that share the same title). All changes are
    flushed (not committed) at the end — the caller owns the transaction.
    """
    now = datetime.now(timezone.utc)
    result = await session.execute(
        select(EnergyQuota).where(EnergyQuota.is_active == True)
    )
    quotas = result.scalars().all()
    for quota in quotas:
        period_start, period_end = _get_period_range(quota.period, now)
        # Aggregate actual usage from EnergyDailySummary.
        # target_id maps to device_groups; simplified here: sum the
        # consumption of all rows matching the quota's energy_type.
        usage_query = select(func.coalesce(func.sum(EnergyDailySummary.total_consumption), 0)).where(
            and_(
                EnergyDailySummary.energy_type == quota.energy_type,
                EnergyDailySummary.date >= period_start,
                EnergyDailySummary.date < period_end,
            )
        )
        actual_value = (await session.execute(usage_query)).scalar() or 0
        # Usage rate as a percentage of the quota (0 when quota_value is 0,
        # avoiding a ZeroDivisionError for unset quotas).
        usage_rate_pct = (actual_value / quota.quota_value * 100) if quota.quota_value > 0 else 0
        # Classify status: alert threshold ("exceeded") takes precedence over
        # the warning threshold.
        if usage_rate_pct >= quota.alert_threshold_pct:
            status = "exceeded"
        elif usage_rate_pct >= quota.warning_threshold_pct:
            status = "warning"
        else:
            status = "normal"
        # Update or create the QuotaUsage record for this exact period window.
        existing_result = await session.execute(
            select(QuotaUsage).where(
                and_(
                    QuotaUsage.quota_id == quota.id,
                    QuotaUsage.period_start == period_start,
                    QuotaUsage.period_end == period_end,
                )
            )
        )
        usage_record = existing_result.scalar_one_or_none()
        if usage_record:
            usage_record.actual_value = actual_value
            usage_record.usage_rate_pct = usage_rate_pct
            usage_record.status = status
            usage_record.calculated_at = now
        else:
            usage_record = QuotaUsage(
                quota_id=quota.id,
                period_start=period_start,
                period_end=period_end,
                actual_value=actual_value,
                quota_value=quota.quota_value,
                usage_rate_pct=usage_rate_pct,
                status=status,
            )
            session.add(usage_record)
        # Generate an alarm event once the warning threshold is crossed.
        if status in ("warning", "exceeded"):
            # Skip if an unresolved alarm for this quota already exists
            # (dedup key is the alarm title, which embeds the quota name).
            active_alarm = await session.execute(
                select(AlarmEvent).where(
                    and_(
                        AlarmEvent.title == f"配额预警: {quota.name}",
                        AlarmEvent.status.in_(["active", "acknowledged"]),
                    )
                )
            )
            if not active_alarm.scalar_one_or_none():
                severity = "critical" if status == "exceeded" else "warning"
                event = AlarmEvent(
                    rule_id=None,
                    device_id=quota.target_id,
                    severity=severity,
                    title=f"配额预警: {quota.name}",
                    description=f"当前使用 {actual_value:.1f}{quota.unit}"
                    f"配额 {quota.quota_value:.1f}{quota.unit}"
                    f"使用率 {usage_rate_pct:.1f}%",
                    value=actual_value,
                    threshold=quota.quota_value,
                    status="active",
                    triggered_at=now,
                )
                session.add(event)
                logger.info(
                    f"Quota alert: {quota.name} | usage={actual_value:.1f} "
                    f"quota={quota.quota_value:.1f} rate={usage_rate_pct:.1f}%"
                )
    await session.flush()

View File

@@ -0,0 +1,523 @@
"""
报表生成服务 - PDF/Excel report generation for Tianpu EMS.
"""
import os
import io
from datetime import datetime, date, timedelta
from pathlib import Path
from typing import Any
from sqlalchemy import select, func, and_
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.device import Device
from app.models.energy import EnergyDailySummary
from app.models.alarm import AlarmEvent
from app.models.carbon import CarbonEmission
# Directory where generated report files are written: <project root>/reports,
# created on import if missing.
REPORTS_DIR = Path(__file__).resolve().parent.parent.parent / "reports"
REPORTS_DIR.mkdir(exist_ok=True)
PLATFORM_TITLE = "天普零碳园区智慧能源管理平台"
# Display labels for energy types; unknown types fall through to the raw key.
ENERGY_TYPE_LABELS = {
    "electricity": "电力",
    "heat": "热能",
    "water": "水",  # fix: label was an empty string (character lost)
    "gas": "天然气",
}
DEVICE_STATUS_LABELS = {
    "online": "在线",
    "offline": "离线",
    "alarm": "告警",
    "maintenance": "维护中",
}
SEVERITY_LABELS = {
    "critical": "紧急",
    "major": "重要",
    "warning": "一般",
}
def _register_chinese_font():
    """Register a CJK-capable font with ReportLab for PDF generation.

    Probes well-known Windows / Linux / macOS font locations in order and
    registers the first one that loads as "ChineseFont". Falls back to the
    built-in "Helvetica" (no CJK glyphs) when none can be registered.
    """
    from reportlab.pdfbase import pdfmetrics
    from reportlab.pdfbase.ttfonts import TTFont

    candidates = (
        "C:/Windows/Fonts/simsun.ttc",
        "C:/Windows/Fonts/simhei.ttf",
        "C:/Windows/Fonts/msyh.ttc",
        "/usr/share/fonts/truetype/wqy/wqy-microhei.ttc",
        "/usr/share/fonts/opentype/noto/NotoSansCJK-Regular.ttc",
        "/System/Library/Fonts/PingFang.ttc",
    )
    for path in candidates:
        if not os.path.exists(path):
            continue
        try:
            pdfmetrics.registerFont(TTFont("ChineseFont", path))
        except Exception:
            # Corrupt/unsupported font file — try the next candidate.
            continue
        return "ChineseFont"
    return "Helvetica"
class ReportGenerator:
    """Generates PDF and Excel reports from EMS data.

    Each ``generate_*`` coroutine fetches rows via the injected async session,
    computes a key/value summary, and renders the result as an Excel workbook
    (default) or a PDF document through the private ``_generate_*`` helpers,
    returning the path of the file written under REPORTS_DIR.
    """

    # Shared column layout for the per-day energy detail table, used by both
    # the daily report and the monthly summary (was previously duplicated).
    _ENERGY_HEADERS = ["日期", "设备ID", "能源类型", "消耗量", "产出量", "峰值功率(kW)", "平均功率(kW)", "运行时长(h)", "费用(元)", "碳排放(kgCO₂)"]
    _ENERGY_KEYS = ["date", "device_id", "energy_type", "total_consumption", "total_generation", "peak_power", "avg_power", "operating_hours", "cost", "carbon_emission"]

    def __init__(self, db: AsyncSession):
        self.db = db

    # ------------------------------------------------------------------ #
    # Data fetching helpers
    # ------------------------------------------------------------------ #
    async def _fetch_energy_daily(
        self, start_date: date, end_date: date, device_ids: list[int] | None = None
    ) -> list[dict]:
        """Fetch EnergyDailySummary rows in [start_date, end_date] as plain dicts.

        NULL numeric columns are coalesced to 0 and rounded for display.
        """
        q = select(EnergyDailySummary).where(
            and_(
                func.date(EnergyDailySummary.date) >= start_date,
                func.date(EnergyDailySummary.date) <= end_date,
            )
        )
        if device_ids:
            q = q.where(EnergyDailySummary.device_id.in_(device_ids))
        q = q.order_by(EnergyDailySummary.date)
        result = await self.db.execute(q)
        rows = result.scalars().all()
        return [
            {
                "date": str(r.date.date()) if r.date else "",
                "device_id": r.device_id,
                "energy_type": ENERGY_TYPE_LABELS.get(r.energy_type, r.energy_type),
                "total_consumption": round(r.total_consumption or 0, 2),
                "total_generation": round(r.total_generation or 0, 2),
                "peak_power": round(r.peak_power or 0, 2),
                "avg_power": round(r.avg_power or 0, 2),
                "operating_hours": round(r.operating_hours or 0, 1),
                "cost": round(r.cost or 0, 2),
                "carbon_emission": round(r.carbon_emission or 0, 2),
            }
            for r in rows
        ]

    async def _fetch_devices(self) -> list[dict]:
        """Fetch all active devices with human-readable status labels."""
        result = await self.db.execute(
            select(Device).where(Device.is_active == True).order_by(Device.id)
        )
        return [
            {
                "id": d.id,
                "name": d.name,
                "code": d.code,
                "device_type": d.device_type,
                "status": DEVICE_STATUS_LABELS.get(d.status, d.status),
                "rated_power": d.rated_power or 0,
                "location": d.location or "",
                "last_data_time": str(d.last_data_time) if d.last_data_time else "N/A",
            }
            for d in result.scalars().all()
        ]

    async def _fetch_alarms(self, start_date: date, end_date: date) -> list[dict]:
        """Fetch alarm events triggered in [start_date, end_date], newest first."""
        q = select(AlarmEvent).where(
            and_(
                func.date(AlarmEvent.triggered_at) >= start_date,
                func.date(AlarmEvent.triggered_at) <= end_date,
            )
        ).order_by(AlarmEvent.triggered_at.desc())
        result = await self.db.execute(q)
        return [
            {
                "id": a.id,
                "device_id": a.device_id,
                "severity": SEVERITY_LABELS.get(a.severity, a.severity),
                "title": a.title,
                "description": a.description or "",
                "value": a.value,
                "threshold": a.threshold,
                "status": a.status,
                "triggered_at": str(a.triggered_at) if a.triggered_at else "",
                "resolved_at": str(a.resolved_at) if a.resolved_at else "",
            }
            for a in result.scalars().all()
        ]

    async def _fetch_carbon(self, start_date: date, end_date: date) -> list[dict]:
        """Fetch carbon-emission rows in [start_date, end_date], oldest first."""
        q = select(CarbonEmission).where(
            and_(
                func.date(CarbonEmission.date) >= start_date,
                func.date(CarbonEmission.date) <= end_date,
            )
        ).order_by(CarbonEmission.date)
        result = await self.db.execute(q)
        return [
            {
                "date": str(c.date.date()) if c.date else "",
                "scope": c.scope,
                "category": c.category,
                "emission": round(c.emission or 0, 2),
                "reduction": round(c.reduction or 0, 2),
                "energy_consumption": round(c.energy_consumption or 0, 2),
                "energy_unit": c.energy_unit or "",
            }
            for c in result.scalars().all()
        ]

    # ------------------------------------------------------------------ #
    # Public generation methods
    # ------------------------------------------------------------------ #
    async def generate_energy_daily_report(
        self,
        start_date: date,
        end_date: date,
        device_ids: list[int] | None = None,
        export_format: str = "xlsx",
    ) -> str:
        """Generate the per-day energy report for a date range; returns the file path."""
        data = await self._fetch_energy_daily(start_date, end_date, device_ids)
        title = "每日能耗报表"
        date_range_str = f"{start_date} ~ {end_date}"
        summary = self._compute_energy_summary(data)
        headers = self._ENERGY_HEADERS
        table_keys = self._ENERGY_KEYS
        # Time suffix keeps repeated same-range exports from colliding.
        filename = f"energy_daily_{start_date}_{end_date}_{datetime.now().strftime('%H%M%S')}"
        if export_format == "pdf":
            return self._generate_pdf(title, date_range_str, summary, headers, table_keys, data, filename)
        else:
            return self._generate_excel(title, date_range_str, summary, headers, table_keys, data, filename)

    async def generate_monthly_summary(
        self, month: int, year: int, export_format: str = "xlsx"
    ) -> str:
        """Generate the monthly energy summary for (year, month); returns the file path."""
        start = date(year, month, 1)
        # Last day of the month = day before the first of the next month.
        if month == 12:
            end = date(year + 1, 1, 1) - timedelta(days=1)
        else:
            end = date(year, month + 1, 1) - timedelta(days=1)
        data = await self._fetch_energy_daily(start, end)
        title = f"{year}年{month}月能耗月报"
        date_range_str = f"{start} ~ {end}"
        summary = self._compute_energy_summary(data)
        headers = self._ENERGY_HEADERS
        table_keys = self._ENERGY_KEYS
        filename = f"monthly_summary_{year}_{month:02d}"
        if export_format == "pdf":
            return self._generate_pdf(title, date_range_str, summary, headers, table_keys, data, filename)
        else:
            return self._generate_excel(title, date_range_str, summary, headers, table_keys, data, filename)

    async def generate_device_status_report(self, export_format: str = "xlsx") -> str:
        """Generate a snapshot report of all active devices; returns the file path."""
        data = await self._fetch_devices()
        title = "设备状态报表"
        date_range_str = f"生成时间: {datetime.now().strftime('%Y-%m-%d %H:%M')}"
        summary = self._compute_device_summary(data)
        headers = ["设备ID", "设备名称", "设备编号", "设备类型", "状态", "额定功率(kW)", "位置", "最近数据时间"]
        table_keys = ["id", "name", "code", "device_type", "status", "rated_power", "location", "last_data_time"]
        filename = f"device_status_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
        if export_format == "pdf":
            return self._generate_pdf(title, date_range_str, summary, headers, table_keys, data, filename)
        else:
            return self._generate_excel(title, date_range_str, summary, headers, table_keys, data, filename)

    async def generate_alarm_report(
        self, start_date: date, end_date: date, export_format: str = "xlsx"
    ) -> str:
        """Generate the alarm analysis report for a date range; returns the file path."""
        data = await self._fetch_alarms(start_date, end_date)
        title = "告警分析报表"
        date_range_str = f"{start_date} ~ {end_date}"
        summary = self._compute_alarm_summary(data)
        headers = ["告警ID", "设备ID", "严重程度", "标题", "描述", "触发值", "阈值", "状态", "触发时间", "解决时间"]
        table_keys = ["id", "device_id", "severity", "title", "description", "value", "threshold", "status", "triggered_at", "resolved_at"]
        filename = f"alarm_report_{start_date}_{end_date}"
        if export_format == "pdf":
            return self._generate_pdf(title, date_range_str, summary, headers, table_keys, data, filename)
        else:
            return self._generate_excel(title, date_range_str, summary, headers, table_keys, data, filename)

    async def generate_carbon_report(
        self, start_date: date, end_date: date, export_format: str = "xlsx"
    ) -> str:
        """Generate the carbon-emission analysis report; returns the file path."""
        data = await self._fetch_carbon(start_date, end_date)
        title = "碳排放分析报表"
        date_range_str = f"{start_date} ~ {end_date}"
        summary = self._compute_carbon_summary(data)
        headers = ["日期", "范围", "类别", "排放量(kgCO₂e)", "减排量(kgCO₂e)", "能耗", "单位"]
        table_keys = ["date", "scope", "category", "emission", "reduction", "energy_consumption", "energy_unit"]
        filename = f"carbon_report_{start_date}_{end_date}"
        if export_format == "pdf":
            return self._generate_pdf(title, date_range_str, summary, headers, table_keys, data, filename)
        else:
            return self._generate_excel(title, date_range_str, summary, headers, table_keys, data, filename)

    # ------------------------------------------------------------------ #
    # Summary computation helpers
    # ------------------------------------------------------------------ #
    @staticmethod
    def _compute_energy_summary(data: list[dict]) -> list[tuple[str, str]]:
        """Totals for the energy reports as (label, formatted value) pairs."""
        total_consumption = sum(r["total_consumption"] for r in data)
        total_generation = sum(r["total_generation"] for r in data)
        total_cost = sum(r["cost"] for r in data)
        total_carbon = sum(r["carbon_emission"] for r in data)
        return [
            ("数据条数", str(len(data))),
            ("总消耗量", f"{total_consumption:,.2f}"),
            ("总产出量", f"{total_generation:,.2f}"),
            ("总费用(元)", f"{total_cost:,.2f}"),
            ("总碳排放(kgCO₂)", f"{total_carbon:,.2f}"),
        ]

    @staticmethod
    def _compute_device_summary(data: list[dict]) -> list[tuple[str, str]]:
        """Device counts by (already-localized) status label."""
        total = len(data)
        online = sum(1 for d in data if d["status"] == "在线")
        offline = sum(1 for d in data if d["status"] == "离线")
        alarm = sum(1 for d in data if d["status"] == "告警")
        return [
            ("设备总数", str(total)),
            ("在线", str(online)),
            ("离线", str(offline)),
            ("告警", str(alarm)),
        ]

    @staticmethod
    def _compute_alarm_summary(data: list[dict]) -> list[tuple[str, str]]:
        """Alarm counts by severity label and resolution state."""
        total = len(data)
        critical = sum(1 for a in data if a["severity"] == "紧急")
        major = sum(1 for a in data if a["severity"] == "重要")
        resolved = sum(1 for a in data if a["status"] == "resolved")
        return [
            ("告警总数", str(total)),
            ("紧急", str(critical)),
            ("重要", str(major)),
            ("已解决", str(resolved)),
        ]

    @staticmethod
    def _compute_carbon_summary(data: list[dict]) -> list[tuple[str, str]]:
        """Gross, avoided, and net emission totals."""
        total_emission = sum(r["emission"] for r in data)
        total_reduction = sum(r["reduction"] for r in data)
        net = total_emission - total_reduction
        return [
            ("数据条数", str(len(data))),
            ("总排放(kgCO₂e)", f"{total_emission:,.2f}"),
            ("总减排(kgCO₂e)", f"{total_reduction:,.2f}"),
            ("净排放(kgCO₂e)", f"{net:,.2f}"),
        ]

    # ------------------------------------------------------------------ #
    # PDF generation (ReportLab)
    # ------------------------------------------------------------------ #
    def _generate_pdf(
        self,
        title: str,
        date_range_str: str,
        summary: list[tuple[str, str]],
        headers: list[str],
        table_keys: list[str],
        data: list[dict],
        filename: str,
    ) -> str:
        """Render header + summary table + detail table (capped at 500 rows) to a PDF."""
        from reportlab.lib.pagesizes import A4
        from reportlab.lib import colors
        from reportlab.lib.styles import ParagraphStyle
        from reportlab.lib.units import mm
        from reportlab.platypus import (
            SimpleDocTemplate, Table, TableStyle, Paragraph, Spacer
        )
        font_name = _register_chinese_font()
        # Fix: use the caller-supplied filename (was a hard-coded literal that
        # made every report overwrite the same file).
        filepath = str(REPORTS_DIR / f"{filename}.pdf")
        doc = SimpleDocTemplate(
            filepath, pagesize=A4,
            topMargin=20 * mm, bottomMargin=20 * mm,
            leftMargin=15 * mm, rightMargin=15 * mm,
        )
        title_style = ParagraphStyle(
            "Title", fontName=font_name, fontSize=16, alignment=1, spaceAfter=6,
        )
        subtitle_style = ParagraphStyle(
            "Subtitle", fontName=font_name, fontSize=10, alignment=1,
            textColor=colors.grey, spaceAfter=4,
        )
        section_style = ParagraphStyle(
            "Section", fontName=font_name, fontSize=12, spaceBefore=12, spaceAfter=6,
        )
        normal_style = ParagraphStyle(
            "Normal", fontName=font_name, fontSize=9,
        )
        elements: list[Any] = []
        # Header
        elements.append(Paragraph(PLATFORM_TITLE, subtitle_style))
        elements.append(Paragraph(title, title_style))
        elements.append(Paragraph(date_range_str, subtitle_style))
        elements.append(Spacer(1, 8 * mm))
        # Summary section
        elements.append(Paragraph("概要", section_style))
        summary_data = [[Paragraph(k, normal_style), Paragraph(v, normal_style)] for k, v in summary]
        summary_table = Table(summary_data, colWidths=[120, 200])
        summary_table.setStyle(TableStyle([
            ("BACKGROUND", (0, 0), (0, -1), colors.Color(0.94, 0.94, 0.94)),
            ("GRID", (0, 0), (-1, -1), 0.5, colors.grey),
            ("VALIGN", (0, 0), (-1, -1), "MIDDLE"),
            ("LEFTPADDING", (0, 0), (-1, -1), 6),
            ("RIGHTPADDING", (0, 0), (-1, -1), 6),
            ("TOPPADDING", (0, 0), (-1, -1), 4),
            ("BOTTOMPADDING", (0, 0), (-1, -1), 4),
        ]))
        elements.append(summary_table)
        elements.append(Spacer(1, 8 * mm))
        # Detail table
        elements.append(Paragraph("明细数据", section_style))
        if data:
            page_width = A4[0] - 30 * mm
            col_width = page_width / len(headers)
            header_row = [Paragraph(h, ParagraphStyle("H", fontName=font_name, fontSize=7, alignment=1)) for h in headers]
            table_data = [header_row]
            cell_style = ParagraphStyle("Cell", fontName=font_name, fontSize=7)
            for row in data[:500]:  # limit rows for PDF
                table_data.append([Paragraph(str(row.get(k, "")), cell_style) for k in table_keys])
            detail_table = Table(table_data, colWidths=[col_width] * len(headers), repeatRows=1)
            detail_table.setStyle(TableStyle([
                ("BACKGROUND", (0, 0), (-1, 0), colors.Color(0.2, 0.4, 0.7)),
                ("TEXTCOLOR", (0, 0), (-1, 0), colors.white),
                ("GRID", (0, 0), (-1, -1), 0.5, colors.lightgrey),
                ("ROWBACKGROUNDS", (0, 1), (-1, -1), [colors.white, colors.Color(0.96, 0.96, 0.96)]),
                ("VALIGN", (0, 0), (-1, -1), "MIDDLE"),
                ("LEFTPADDING", (0, 0), (-1, -1), 3),
                ("RIGHTPADDING", (0, 0), (-1, -1), 3),
                ("TOPPADDING", (0, 0), (-1, -1), 3),
                ("BOTTOMPADDING", (0, 0), (-1, -1), 3),
            ]))
            elements.append(detail_table)
            if len(data) > 500:
                elements.append(Spacer(1, 4 * mm))
                elements.append(Paragraph(f"(共 {len(data)} 条记录，PDF 仅显示前500条)", normal_style))
        else:
            elements.append(Paragraph("暂无数据", normal_style))
        # Footer
        elements.append(Spacer(1, 10 * mm))
        footer_style = ParagraphStyle("Footer", fontName=font_name, fontSize=8, textColor=colors.grey, alignment=2)
        elements.append(Paragraph(f"生成时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}", footer_style))
        doc.build(elements)
        return filepath

    # ------------------------------------------------------------------ #
    # Excel generation (OpenPyXL)
    # ------------------------------------------------------------------ #
    def _generate_excel(
        self,
        title: str,
        date_range_str: str,
        summary: list[tuple[str, str]],
        headers: list[str],
        table_keys: list[str],
        data: list[dict],
        filename: str,
    ) -> str:
        """Render a two-sheet workbook: summary sheet + styled detail sheet."""
        from openpyxl import Workbook
        from openpyxl.styles import Font, Alignment, PatternFill, Border, Side
        # Fix: use the caller-supplied filename (was a hard-coded literal that
        # made every report overwrite the same file).
        filepath = str(REPORTS_DIR / f"{filename}.xlsx")
        wb = Workbook()
        header_font = Font(bold=True, color="FFFFFF", size=11)
        header_fill = PatternFill(start_color="336699", end_color="336699", fill_type="solid")
        header_align = Alignment(horizontal="center", vertical="center", wrap_text=True)
        thin_border = Border(
            left=Side(style="thin", color="CCCCCC"),
            right=Side(style="thin", color="CCCCCC"),
            top=Side(style="thin", color="CCCCCC"),
            bottom=Side(style="thin", color="CCCCCC"),
        )
        title_font = Font(bold=True, size=14)
        subtitle_font = Font(size=10, color="666666")
        summary_key_fill = PatternFill(start_color="F0F0F0", end_color="F0F0F0", fill_type="solid")
        # --- Summary sheet ---
        ws_summary = wb.active
        ws_summary.title = "概要"
        ws_summary.append([PLATFORM_TITLE])
        ws_summary.merge_cells("A1:D1")
        ws_summary["A1"].font = title_font
        ws_summary.append([title])
        ws_summary.merge_cells("A2:D2")
        ws_summary["A2"].font = Font(bold=True, size=12)
        ws_summary.append([date_range_str])
        ws_summary.merge_cells("A3:D3")
        ws_summary["A3"].font = subtitle_font
        ws_summary.append([])
        ws_summary.append(["指标", "值"])
        ws_summary["A5"].font = Font(bold=True)
        ws_summary["B5"].font = Font(bold=True)
        for label, value in summary:
            row = ws_summary.max_row + 1
            ws_summary.append([label, value])
            ws_summary.cell(row=row, column=1).fill = summary_key_fill
        ws_summary.column_dimensions["A"].width = 25
        ws_summary.column_dimensions["B"].width = 30
        # --- Detail sheet ---
        ws_detail = wb.create_sheet("明细数据")
        # Header row
        for col_idx, h in enumerate(headers, 1):
            cell = ws_detail.cell(row=1, column=col_idx, value=h)
            cell.font = header_font
            cell.fill = header_fill
            cell.alignment = header_align
            cell.border = thin_border
        # Data rows
        for row_idx, row_data in enumerate(data, 2):
            for col_idx, key in enumerate(table_keys, 1):
                val = row_data.get(key, "")
                cell = ws_detail.cell(row=row_idx, column=col_idx, value=val)
                cell.border = thin_border
                if isinstance(val, float):
                    cell.number_format = "#,##0.00"
                cell.alignment = Alignment(vertical="center")
        # Auto-width columns (sample at most the first 100 data rows)
        for col_idx in range(1, len(headers) + 1):
            max_len = len(str(headers[col_idx - 1]))
            for row_idx in range(2, min(len(data) + 2, 102)):
                val = ws_detail.cell(row=row_idx, column=col_idx).value
                if val:
                    max_len = max(max_len, len(str(val)))
            ws_detail.column_dimensions[ws_detail.cell(row=1, column=col_idx).column_letter].width = min(max_len + 4, 40)
        # Auto-filter
        if data:
            ws_detail.auto_filter.ref = f"A1:{ws_detail.cell(row=1, column=len(headers)).column_letter}{len(data) + 1}"
        # Freeze header
        ws_detail.freeze_panes = "A2"
        wb.save(filepath)
        return filepath

View File

@@ -0,0 +1,192 @@
"""报表定时调度服务 - Schedule report tasks via APScheduler and send results by email."""
import logging
from datetime import date, timedelta, datetime, timezone
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.cron import CronTrigger
from sqlalchemy import select
from app.core.database import async_session
from app.models.report import ReportTask, ReportTemplate
from app.services.report_generator import ReportGenerator
from app.services.email_service import send_email
logger = logging.getLogger("report_scheduler")
_scheduler: AsyncIOScheduler | None = None
def _parse_cron(cron_expr: str) -> dict:
"""Parse a 5-field cron expression into APScheduler CronTrigger kwargs."""
parts = cron_expr.strip().split()
if len(parts) != 5:
raise ValueError(f"Invalid cron expression (need 5 fields): {cron_expr}")
return {
"minute": parts[0],
"hour": parts[1],
"day": parts[2],
"month": parts[3],
"day_of_week": parts[4],
}
async def _run_report_task(task_id: int):
    """Execute a single report task: generate the report and email it to recipients.

    Flow: load the ReportTask, skip if missing/inactive, mark it running,
    load its ReportTemplate to pick the report type, generate the file,
    record the path and "completed" status, and email the file when the task
    has recipients. Any failure flips the status to "failed". The session is
    committed exactly once at the end, persisting whichever status was
    reached (the early "template missing" path commits on its own).
    """
    logger.info(f"Running scheduled report task id={task_id}")
    async with async_session() as session:
        # Load task
        task_result = await session.execute(
            select(ReportTask).where(ReportTask.id == task_id)
        )
        task = task_result.scalar_one_or_none()
        if not task:
            logger.warning(f"Report task id={task_id} not found, skipping.")
            return
        if not task.is_active:
            logger.info(f"Report task id={task_id} is inactive, skipping.")
            return
        # Update status
        task.status = "running"
        task.last_run = datetime.now(timezone.utc)
        await session.flush()
        # Load template to determine report type
        tmpl_result = await session.execute(
            select(ReportTemplate).where(ReportTemplate.id == task.template_id)
        )
        template = tmpl_result.scalar_one_or_none()
        if not template:
            logger.error(f"Template id={task.template_id} not found for task id={task_id}")
            task.status = "failed"
            await session.commit()
            return
        try:
            generator = ReportGenerator(session)
            today = date.today()
            export_format = task.export_format or "xlsx"
            # Choose generation method based on template report_type
            if template.report_type == "daily":
                yesterday = today - timedelta(days=1)
                filepath = await generator.generate_energy_daily_report(
                    start_date=yesterday, end_date=yesterday, export_format=export_format
                )
            elif template.report_type == "monthly":
                # Generate for previous month
                first_of_month = today.replace(day=1)
                last_month_end = first_of_month - timedelta(days=1)
                last_month_start = last_month_end.replace(day=1)
                filepath = await generator.generate_monthly_summary(
                    month=last_month_start.month,
                    year=last_month_start.year,
                    export_format=export_format,
                )
            elif template.report_type == "custom" and "device" in template.name.lower():
                # NOTE(review): device-status dispatch keys off the template
                # *name* containing "device" — confirm this naming convention.
                filepath = await generator.generate_device_status_report(
                    export_format=export_format
                )
            else:
                # Default: daily report for yesterday
                yesterday = today - timedelta(days=1)
                filepath = await generator.generate_energy_daily_report(
                    start_date=yesterday, end_date=yesterday, export_format=export_format
                )
            task.file_path = filepath
            task.status = "completed"
            logger.info(f"Report task id={task_id} completed: {filepath}")
            # Send email with attachment if recipients configured
            recipients = task.recipients or []
            if isinstance(recipients, list) and recipients:
                report_name = task.name or template.name
                subject = f"{report_name} - 天普EMS自动报表"
                body_html = f"""
                <div style="font-family: 'Microsoft YaHei', sans-serif; padding: 20px;">
                    <h2 style="color: #1a73e8;">天普零碳园区智慧能源管理平台</h2>
                    <p>您好,</p>
                    <p>系统已自动生成 <strong>{report_name}</strong>,请查收附件。</p>
                    <p style="color: #666; font-size: 13px;">
                        生成时间: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}<br>
                        报表类型: {template.report_type}<br>
                        格式: {export_format.upper()}
                    </p>
                    <hr style="border: none; border-top: 1px solid #e8e8e8; margin: 20px 0;">
                    <p style="font-size: 12px; color: #999;">此为系统自动发送,请勿回复。</p>
                </div>
                """
                await send_email(
                    to=recipients,
                    subject=subject,
                    body_html=body_html,
                    attachments=[filepath],
                )
        except Exception as e:
            # A failed email send also lands here, marking the task failed
            # even though the file may have been generated successfully.
            logger.error(f"Report task id={task_id} failed: {e}", exc_info=True)
            task.status = "failed"
        await session.commit()
async def _load_and_schedule_tasks():
    """Load all active report tasks with schedules and register them with APScheduler."""
    global _scheduler
    if _scheduler is None:
        return
    async with async_session() as session:
        stmt = select(ReportTask).where(
            ReportTask.is_active == True,
            ReportTask.schedule != None,
            ReportTask.schedule != "",
        )
        result = await session.execute(stmt)
        tasks = result.scalars().all()
        for task in tasks:
            try:
                trigger = CronTrigger(**_parse_cron(task.schedule))
                # replace_existing keeps re-loads idempotent per task id.
                _scheduler.add_job(
                    _run_report_task,
                    trigger,
                    args=[task.id],
                    id=f"report_task_{task.id}",
                    replace_existing=True,
                    misfire_grace_time=3600,
                )
                logger.info(
                    f"Scheduled report task id={task.id} name='{task.name}' "
                    f"cron='{task.schedule}'"
                )
            except Exception as exc:
                logger.error(f"Failed to schedule report task id={task.id}: {exc}")
    logger.info(f"Report scheduler loaded {len(tasks)} task(s).")
async def start_scheduler():
    """Start the APScheduler-based report scheduler (idempotent)."""
    global _scheduler
    if _scheduler is not None and _scheduler.running:
        logger.warning("Report scheduler is already running.")
        return
    scheduler = AsyncIOScheduler(timezone="Asia/Shanghai")
    scheduler.start()
    _scheduler = scheduler
    logger.info("Report scheduler started.")
    await _load_and_schedule_tasks()
async def stop_scheduler():
    """Stop the report scheduler gracefully and clear the module handle."""
    global _scheduler
    scheduler, _scheduler = _scheduler, None
    if scheduler and scheduler.running:
        scheduler.shutdown(wait=False)
        logger.info("Report scheduler stopped.")

View File

@@ -0,0 +1,295 @@
"""模拟数据生成器 - 为天普园区设备生成真实感的模拟数据
Uses physics-based solar position, Beijing weather models, cloud transients,
temperature derating, and realistic building load patterns to produce data
that is convincing to industrial park asset owners.
"""
import asyncio
import random
import math
import logging
from datetime import datetime, timezone, timedelta
from sqlalchemy import select
from app.core.database import async_session
from app.models.device import Device
from app.models.energy import EnergyData
from app.models.alarm import AlarmEvent
from app.services.alarm_checker import check_alarms
from app.services.weather_model import (
pv_power, pv_electrical, get_pv_orientation,
heat_pump_data, building_load, indoor_sensor,
heat_meter_data, get_hvac_mode, outdoor_temperature,
should_skip_reading, should_go_offline,
)
logger = logging.getLogger("simulator")
class DataSimulator:
    """Fabricates realistic telemetry for all active campus devices.

    A 15-second asyncio loop generates EnergyData rows per device type
    (PV inverter, heat pump, meter, sensor, heat meter), maintains
    per-device daily/total energy accumulators, injects periodic demo
    anomalies for alarm testing, and simulates communication glitches
    and brief offline windows. After each batch it runs the alarm
    checker and commits everything in one session.
    """
    def __init__(self):
        # Background asyncio.Task running _run_loop (None until start()).
        self._task = None
        self._running = False
        # Generation cycles since start — drives the demo-anomaly cadence.
        self._cycle_count = 0
        # Track daily energy accumulators per device
        self._daily_energy: dict[int, float] = {}
        self._total_energy: dict[int, float] = {}
        # Beijing day-of-year seen last cycle; used for the midnight reset.
        self._last_day: int = -1
        # Track offline status per device
        self._offline_until: dict[int, datetime] = {}
        # Cache heat pump totals for heat meter correlation
        self._last_hp_power: float = 0.0
        self._last_hp_cop: float = 3.0
    async def start(self):
        """Start the background generation loop."""
        self._running = True
        self._task = asyncio.create_task(self._run_loop())
    async def stop(self):
        """Stop the loop flag and cancel the background task."""
        self._running = False
        if self._task:
            self._task.cancel()
    async def _run_loop(self):
        """Main loop: generate one batch, log-and-continue on errors, sleep."""
        while self._running:
            try:
                await self._generate_data()
            except Exception as e:
                logger.error(f"Simulator error: {e}", exc_info=True)
            await asyncio.sleep(15)  # generate one batch every 15 seconds
    async def _generate_data(self):
        """Generate and persist one batch of readings for all active devices."""
        now = datetime.now(timezone.utc)
        beijing_dt = now + timedelta(hours=8)
        self._cycle_count += 1
        # Reset daily energy accumulators at midnight Beijing time
        current_day = beijing_dt.timetuple().tm_yday
        if current_day != self._last_day:
            self._daily_energy.clear()
            self._last_day = current_day
        async with async_session() as session:
            result = await session.execute(select(Device).where(Device.is_active == True))
            devices = result.scalars().all()
            data_points = []
            hp_total_power = 0.0
            hp_cop_sum = 0.0
            hp_count = 0
            # First pass: generate heat pump data (needed for heat meter correlation)
            # NOTE(review): heat pump values are produced here even for units the
            # second pass later skips or marks offline, so the cached totals can
            # include a briefly-offline unit — confirm this is acceptable.
            hp_results: dict[int, dict] = {}
            for device in devices:
                if device.device_type == "heat_pump":
                    hp_data = self._gen_heatpump_data(device, now)
                    hp_results[device.id] = hp_data
                    if hp_data:
                        hp_total_power += hp_data.get("_power", 0)
                        cop = hp_data.get("_cop", 0)
                        if cop > 0:
                            hp_cop_sum += cop
                            hp_count += 1
            self._last_hp_power = hp_total_power
            self._last_hp_cop = hp_cop_sum / hp_count if hp_count > 0 else 3.0
            for device in devices:
                # Simulate communication glitch: skip a reading ~1% of cycles
                if should_skip_reading(self._cycle_count):
                    continue
                # Simulate brief device offline events
                if device.id in self._offline_until:
                    if now < self._offline_until[device.id]:
                        device.status = "offline"
                        continue
                    else:
                        del self._offline_until[device.id]
                if should_go_offline():
                    self._offline_until[device.id] = now + timedelta(seconds=random.randint(15, 30))
                    device.status = "offline"
                    continue
                points = self._generate_device_data(device, now, hp_results)
                data_points.extend(points)
                device.status = "online"
                device.last_data_time = now
            if data_points:
                session.add_all(data_points)
                await session.flush()
            # Run alarm checker after data generation
            try:
                await check_alarms(session)
            except Exception as e:
                logger.error(f"Alarm checker error: {e}", exc_info=True)
            await session.commit()
    def _should_trigger_anomaly(self, anomaly_type: str) -> bool:
        """Determine if we should inject an anomalous value for demo purposes.

        Preserves the existing alarm demo trigger pattern:
        - PV low power: every ~10 min (40 cycles), lasts ~2 min (8 cycles)
        - Heat pump low COP: every ~20 min (80 cycles), lasts ~2 min
        - Sensor out of range: every ~30 min (120 cycles), lasts ~2 min
        """
        c = self._cycle_count
        if anomaly_type == "pv_low_power":
            return (c % 40) < 8
        elif anomaly_type == "hp_low_cop":
            return (c % 80) < 8
        elif anomaly_type == "sensor_out_of_range":
            return (c % 120) < 8
        return False
    def _generate_device_data(self, device: Device, now: datetime,
                              hp_results: dict) -> list[EnergyData]:
        """Dispatch to the per-device-type generator; returns EnergyData rows."""
        points = []
        if device.device_type == "pv_inverter":
            points = self._gen_pv_data(device, now)
        elif device.device_type == "heat_pump":
            # Heat pump rows were pre-generated in the first pass.
            hp_data = hp_results.get(device.id)
            if hp_data:
                points = hp_data.get("_points", [])
        elif device.device_type == "meter":
            points = self._gen_meter_data(device, now)
        elif device.device_type == "sensor":
            points = self._gen_sensor_data(device, now)
        elif device.device_type == "heat_meter":
            points = self._gen_heat_meter_data(device, now)
        return points
    def _gen_pv_data(self, device: Device, now: datetime) -> list[EnergyData]:
        """PV inverter data — solar position, cloud cover, temperature-derating model."""
        rated = device.rated_power or 110.0
        orientation = get_pv_orientation(device.code)
        power = pv_power(now, rated_power=rated, orientation=orientation,
                         device_code=device.code)
        # Demo anomaly: cloud cover drops INV-01 power very low for alarm testing
        if self._should_trigger_anomaly("pv_low_power") and device.code == "INV-01":
            power = random.uniform(1.0, 3.0)
        elec = pv_electrical(power, rated_power=rated)
        # Demo anomaly: over-temperature for alarm testing
        if self._should_trigger_anomaly("pv_low_power") and device.code == "INV-01":
            elec["temperature"] = round(random.uniform(67, 72), 1)
        # Accumulate daily energy (power * 15s interval)
        interval_hours = 15.0 / 3600.0
        energy_increment = power * interval_hours
        self._daily_energy[device.id] = self._daily_energy.get(device.id, 0) + energy_increment
        # Total energy: start from a reasonable base
        if device.id not in self._total_energy:
            self._total_energy[device.id] = 170000 + random.uniform(0, 5000)
        self._total_energy[device.id] += energy_increment
        return [
            EnergyData(device_id=device.id, timestamp=now, data_type="power",
                       value=round(power, 2), unit="kW"),
            EnergyData(device_id=device.id, timestamp=now, data_type="daily_energy",
                       value=round(self._daily_energy[device.id], 2), unit="kWh"),
            EnergyData(device_id=device.id, timestamp=now, data_type="total_energy",
                       value=round(self._total_energy[device.id], 1), unit="kWh"),
            EnergyData(device_id=device.id, timestamp=now, data_type="dc_voltage",
                       value=elec["dc_voltage"], unit="V"),
            EnergyData(device_id=device.id, timestamp=now, data_type="ac_voltage",
                       value=elec["ac_voltage"], unit="V"),
            EnergyData(device_id=device.id, timestamp=now, data_type="temperature",
                       value=elec["temperature"], unit=""),
        ]
    def _gen_heatpump_data(self, device: Device, now: datetime) -> dict:
        """Heat pump unit data — outdoor-temperature and COP model.

        Returns a dict with EnergyData rows under "_points" plus the raw
        "_power"/"_cop" values used for heat meter correlation.
        """
        rated = device.rated_power or 35.0
        data = heat_pump_data(now, rated_power=rated, device_code=device.code)
        cop = data["cop"]
        power = data["power"]
        # Demo anomaly: low COP for HP-01
        if self._should_trigger_anomaly("hp_low_cop") and device.code == "HP-01":
            cop = random.uniform(1.2, 1.8)
        # Demo anomaly: overload for HP-02
        if self._should_trigger_anomaly("hp_low_cop") and device.code == "HP-02":
            power = random.uniform(39, 42)
        points = [
            EnergyData(device_id=device.id, timestamp=now, data_type="power",
                       value=round(power, 2), unit="kW"),
            EnergyData(device_id=device.id, timestamp=now, data_type="cop",
                       value=round(cop, 2), unit=""),
            EnergyData(device_id=device.id, timestamp=now, data_type="inlet_temp",
                       value=data["inlet_temp"], unit=""),
            EnergyData(device_id=device.id, timestamp=now, data_type="outlet_temp",
                       value=data["outlet_temp"], unit=""),
            EnergyData(device_id=device.id, timestamp=now, data_type="flow_rate",
                       value=data["flow_rate"], unit="m³/h"),
            EnergyData(device_id=device.id, timestamp=now, data_type="outdoor_temp",
                       value=data["outdoor_temp"], unit=""),
        ]
        return {
            "_points": points,
            "_power": power,
            "_cop": cop,
        }
    def _gen_meter_data(self, device: Device, now: datetime) -> list[EnergyData]:
        """Meter data — building load model (weekday/weekend, lunch dip, seasonal HVAC share)."""
        data = building_load(now, base_power=50.0, meter_code=device.code)
        return [
            EnergyData(device_id=device.id, timestamp=now, data_type="power",
                       value=data["power"], unit="kW"),
            EnergyData(device_id=device.id, timestamp=now, data_type="voltage",
                       value=data["voltage"], unit="V"),
            EnergyData(device_id=device.id, timestamp=now, data_type="current",
                       value=data["current"], unit="A"),
            EnergyData(device_id=device.id, timestamp=now, data_type="power_factor",
                       value=data["power_factor"], unit=""),
        ]
    def _gen_sensor_data(self, device: Device, now: datetime) -> list[EnergyData]:
        """Temperature/humidity sensor data — indoor HVAC control or outdoor weather model."""
        is_outdoor = False
        if device.metadata_:
            is_outdoor = device.metadata_.get("type") == "outdoor"
        data = indoor_sensor(now, is_outdoor=is_outdoor, device_code=device.code)
        temp = data["temperature"]
        # Demo anomaly: sensor out of range for alarm testing
        if self._should_trigger_anomaly("sensor_out_of_range") and device.code == "TH-01":
            temp = random.uniform(31, 34)
        return [
            EnergyData(device_id=device.id, timestamp=now, data_type="temperature",
                       value=round(temp, 1), unit=""),
            EnergyData(device_id=device.id, timestamp=now, data_type="humidity",
                       value=data["humidity"], unit="%"),
        ]
    def _gen_heat_meter_data(self, device: Device, now: datetime) -> list[EnergyData]:
        """Heat meter data — correlated with the cached heat pump power and COP."""
        data = heat_meter_data(now, hp_power=self._last_hp_power,
                               hp_cop=self._last_hp_cop)
        return [
            EnergyData(device_id=device.id, timestamp=now, data_type="heat_power",
                       value=data["heat_power"], unit="kW"),
            EnergyData(device_id=device.id, timestamp=now, data_type="flow_rate",
                       value=data["flow_rate"], unit="m³/h"),
            EnergyData(device_id=device.id, timestamp=now, data_type="supply_temp",
                       value=data["supply_temp"], unit=""),
            EnergyData(device_id=device.id, timestamp=now, data_type="return_temp",
                       value=data["return_temp"], unit=""),
        ]

View File

@@ -0,0 +1,739 @@
"""Beijing weather and solar position models for realistic data simulation.
Shared by both the real-time simulator and the backfill script.
Deterministic when given a seed — call set_seed() for reproducible backfills.
Tianpu campus: 39.9N, 116.4E (Beijing / Daxing district)
"""
import math
import random
import zlib
from datetime import datetime, timezone, timedelta
# ---------------------------------------------------------------------------
# Constants
# ---------------------------------------------------------------------------
BEIJING_LAT = 39.9 # degrees N
BEIJING_LON = 116.4 # degrees E
BEIJING_TZ_OFFSET = 8 # UTC+8
# Monthly average temperatures for Beijing (index 0 = Jan)
# Source: typical climatological data
MONTHLY_AVG_TEMP = [-3.0, 0.0, 7.0, 14.0, 21.0, 26.0, 27.5, 26.0, 21.0, 13.0, 4.0, -1.5]
# Diurnal temperature swing amplitude by month (half-range)
MONTHLY_DIURNAL_SWING = [6.0, 7.0, 7.5, 8.0, 7.5, 7.0, 6.0, 6.0, 7.0, 7.5, 7.0, 6.0]
# Monthly average relative humidity (%)
MONTHLY_AVG_HUMIDITY = [44, 42, 38, 38, 45, 58, 72, 75, 62, 55, 50, 46]
# Sunrise/sunset hours (approximate, Beijing local time) by month
MONTHLY_SUNRISE = [7.5, 7.1, 6.4, 5.7, 5.2, 5.0, 5.1, 5.5, 6.0, 6.3, 6.8, 7.3]
MONTHLY_SUNSET = [17.1, 17.6, 18.2, 18.7, 19.2, 19.5, 19.4, 19.0, 18.3, 17.6, 17.0, 16.9]
# Solar declination approximation (degrees) for day-of-year
# and equation of time are computed analytically below
_rng = random.Random()
def set_seed(seed: int):
    """Replace the shared module RNG with a freshly seeded instance.

    All simulated values drawn afterwards are reproducible — used by the
    backfill script.
    """
    global _rng
    _rng = random.Random(seed)
def _gauss(mu: float, sigma: float) -> float:
    """Normally distributed sample from the module RNG (respects set_seed)."""
    return _rng.gauss(mu, sigma)
def _uniform(a: float, b: float) -> float:
    """Uniform sample in [a, b] from the module RNG (respects set_seed)."""
    return _rng.uniform(a, b)
def _random() -> float:
    """Uniform sample in [0, 1) from the module RNG (respects set_seed)."""
    return _rng.random()
# ---------------------------------------------------------------------------
# Solar position (simplified but accurate enough for simulation)
# ---------------------------------------------------------------------------
def _day_of_year(dt: datetime) -> int:
    """Return the Beijing-local day of year (1-366).

    Aware datetimes are shifted to UTC+8 first; naive datetimes are used
    as-is (presumably already Beijing local — TODO confirm with callers).
    """
    if dt.tzinfo:
        local = dt + timedelta(hours=BEIJING_TZ_OFFSET)
    else:
        local = dt
    return local.timetuple().tm_yday
def solar_declination(day_of_year: int) -> float:
    """Approximate solar declination (degrees) for a given day of year.

    Cooper's formula: 23.45 * sin(360/365 * (N - 81)); zero at the
    equinoxes, ±23.45 at the solstices.
    """
    angle_deg = (360 / 365) * (day_of_year - 81)
    return 23.45 * math.sin(math.radians(angle_deg))
def _equation_of_time(day_of_year: int) -> float:
"""Equation of time in minutes."""
b = math.radians((360 / 365) * (day_of_year - 81))
return 9.87 * math.sin(2 * b) - 7.53 * math.cos(b) - 1.5 * math.sin(b)
def solar_altitude(dt: datetime) -> float:
    """Solar altitude angle in degrees for Beijing at the given UTC datetime.

    Negative when the sun is below the horizon. Naive datetimes are used
    as-is (assumed already Beijing local time).
    """
    doy = _day_of_year(dt)
    declination = math.radians(solar_declination(doy))
    latitude = math.radians(BEIJING_LAT)
    # Convert to Beijing local clock time.
    if dt.tzinfo:
        local = dt + timedelta(hours=BEIJING_TZ_OFFSET)
    else:
        local = dt
    # Longitude correction: standard meridian for UTC+8 is 120E (4 min/deg),
    # plus the equation of time.
    correction_min = _equation_of_time(doy) + 4 * (BEIJING_LON - 120.0)
    clock_hours = local.hour + local.minute / 60.0 + local.second / 3600.0
    true_solar_hours = clock_hours + correction_min / 60.0
    hour_angle = math.radians(15 * (true_solar_hours - 12))
    sin_alt = (math.sin(latitude) * math.sin(declination) +
               math.cos(latitude) * math.cos(declination) * math.cos(hour_angle))
    # Clamp for numerical safety before asin.
    return math.degrees(math.asin(max(-1, min(1, sin_alt))))
def solar_azimuth(dt: datetime) -> float:
    """Solar azimuth in degrees (0=N, 90=E, 180=S, 270=W) for Beijing.

    Falls back to due south (180) near the zenith, where the azimuth
    formula is numerically unstable.
    """
    doy = _day_of_year(dt)
    declination = math.radians(solar_declination(doy))
    latitude = math.radians(BEIJING_LAT)
    altitude = math.radians(solar_altitude(dt))
    if math.cos(altitude) < 1e-6:
        return 180.0
    numerator = math.sin(declination) - math.sin(latitude) * math.sin(altitude)
    cos_az = numerator / (math.cos(latitude) * math.cos(altitude))
    cos_az = max(-1, min(1, cos_az))
    azimuth = math.degrees(math.acos(cos_az))
    # Mirror into the western half of the sky after local noon.
    local = dt + timedelta(hours=BEIJING_TZ_OFFSET) if dt.tzinfo else dt
    if local.hour + local.minute / 60.0 > 12:
        azimuth = 360 - azimuth
    return azimuth
# ---------------------------------------------------------------------------
# Cloud transient model
# ---------------------------------------------------------------------------
class CloudModel:
    """Simulates random cloud events that reduce PV output.

    A fresh set of events is generated lazily for each Beijing calendar
    day; all inverters share one instance so cloud dips are correlated
    across the plant.
    """
    def __init__(self):
        # Each event: {"start": minute-of-day, "duration": minutes, "opacity": 0-1}
        self._events: list[dict] = [] # list of {start_minute, duration_minutes, opacity}
        self._last_day: int = -1
    def _generate_day_events(self, doy: int, month: int):
        """Generate cloud events for a day. More clouds in summer monsoon."""
        self._events.clear()
        self._last_day = doy
        # Number of cloud events varies by season
        if month in (7, 8): # monsoon
            n_events = int(_uniform(3, 8))
        elif month in (6, 9):
            n_events = int(_uniform(2, 5))
        elif month in (3, 4, 5, 10):
            n_events = int(_uniform(1, 4))
        else: # winter: clearer skies in Beijing
            n_events = int(_uniform(0, 3))
        for _ in range(n_events):
            # Events fall in daylight hours only (06:00-18:00).
            start = _uniform(6 * 60, 18 * 60) # minutes from midnight
            duration = _uniform(2, 15)
            opacity = _uniform(0.3, 0.7) # how much output drops
            self._events.append({
                "start": start,
                "duration": duration,
                "opacity": opacity,
            })
    def get_cloud_factor(self, dt: datetime) -> float:
        """Returns multiplier 0.3-1.0 (1.0 = clear sky)."""
        beijing_dt = dt + timedelta(hours=BEIJING_TZ_OFFSET) if dt.tzinfo else dt
        doy = beijing_dt.timetuple().tm_yday
        month = beijing_dt.month
        # Regenerate the event list on the first call of each new day.
        if doy != self._last_day:
            self._generate_day_events(doy, month)
        minute_of_day = beijing_dt.hour * 60 + beijing_dt.minute
        factor = 1.0
        # Overlapping events do not stack; the densest cloud wins.
        for ev in self._events:
            if ev["start"] <= minute_of_day <= ev["start"] + ev["duration"]:
                factor = min(factor, 1.0 - ev["opacity"])
        return factor
# Global cloud model instance (shared across inverters for correlated weather).
# reset_cloud_model() replaces it; get_cloud_factor() below is the accessor.
_cloud_model = CloudModel()
def get_cloud_factor(dt: datetime) -> float:
    """Cloud multiplier 0.3-1.0 from the shared module-level CloudModel."""
    return _cloud_model.get_cloud_factor(dt)
def reset_cloud_model():
    """Reset cloud model (useful for backfill where each day is independent).

    Dropping the shared instance discards any cached per-day cloud events.
    """
    global _cloud_model
    _cloud_model = CloudModel()
# ---------------------------------------------------------------------------
# Outdoor temperature model
# ---------------------------------------------------------------------------
def outdoor_temperature(dt: datetime) -> float:
    """Realistic outdoor temperature for Beijing based on month, hour, and noise.

    Combines the monthly climatological mean, a sinusoidal diurnal cycle
    with its peak at ~15:00, a slow pseudo-random day-to-day drift, and
    gaussian noise.

    Args:
        dt: UTC datetime (naive datetimes are used as-is, assumed Beijing local).

    Returns:
        Temperature in degrees C.
    """
    beijing_dt = dt + timedelta(hours=BEIJING_TZ_OFFSET) if dt.tzinfo else dt
    month_idx = beijing_dt.month - 1
    hour = beijing_dt.hour + beijing_dt.minute / 60.0
    avg = MONTHLY_AVG_TEMP[month_idx]
    swing = MONTHLY_DIURNAL_SWING[month_idx]
    # BUG FIX: the previous `-swing * cos(...)` inverted the diurnal cycle,
    # putting the daily MINIMUM at 15:00. A positive cosine centered on
    # 15:00 gives the documented afternoon peak and pre-dawn minimum.
    diurnal = swing * math.cos(2 * math.pi * (hour - 15) / 24)
    # Day-to-day variation (slow drift) so consecutive days differ a little.
    doy = beijing_dt.timetuple().tm_yday
    day_drift = 3.0 * math.sin(doy * 0.7) + 2.0 * math.cos(doy * 1.3)
    noise = _gauss(0, 0.5)
    return avg + diurnal + day_drift + noise
def outdoor_humidity(dt: datetime) -> float:
    """Simulated outdoor relative humidity (%), correlated with season.

    Monthly baseline plus a diurnal cycle (moist pre-dawn, drier
    afternoons) plus gaussian noise, clamped to [15, 95].
    """
    local = dt + timedelta(hours=BEIJING_TZ_OFFSET) if dt.tzinfo else dt
    base = MONTHLY_AVG_HUMIDITY[local.month - 1]
    # Peaks around 04:00, bottoms out around 16:00.
    diurnal = 8.0 * math.cos(2 * math.pi * (local.hour - 4) / 24)
    noise = _gauss(0, 3)
    return max(15, min(95, base + diurnal + noise))
# ---------------------------------------------------------------------------
# PV power model
# ---------------------------------------------------------------------------
def pv_power(dt: datetime, rated_power: float = 110.0,
             orientation: str = "south", device_code: str = "") -> float:
    """Calculate realistic PV inverter output.

    Model: clear-sky irradiance from solar altitude with an air-mass
    atmospheric correction, fixed-tilt panel geometry, orientation bias,
    correlated cloud cover, panel-temperature derating, a stable
    per-inverter offset, and gaussian measurement noise.

    Args:
        dt: UTC datetime
        rated_power: Inverter rated power in kW
        orientation: 'east', 'west', or 'south' - affects morning/afternoon bias
        device_code: Device code for per-inverter variation
    Returns:
        Power in kW (0 at night, clipped at rated_power)
    """
    alt = solar_altitude(dt)
    # Night: exactly 0
    if alt <= 0:
        return 0.0
    # Dawn/dusk ramp: gradual startup below 5 degrees altitude
    if alt < 5:
        ramp_factor = alt / 5.0
    else:
        ramp_factor = 1.0
    # Base clear-sky irradiance (simplified: proportional to sin(altitude))
    # With atmosphere correction (air mass)
    air_mass = 1.0 / max(math.sin(math.radians(alt)), 0.01)
    air_mass = min(air_mass, 40)  # cap for very low sun
    atmospheric_transmission = 0.7 ** (air_mass ** 0.678)  # Meinel model simplified
    clear_sky_factor = math.sin(math.radians(alt)) * atmospheric_transmission
    # Seasonal factor: panels at fixed tilt (~30 degrees in Beijing)
    # Summer sun is higher -> slightly less optimal for tilted panels at noon
    # but longer days compensate
    doy = _day_of_year(dt)
    decl = solar_declination(doy)
    # Approximate panel tilt correction
    tilt_factor = max(0.5, math.cos(math.radians(abs(alt - (90 - BEIJING_LAT + decl)) * 0.3)))
    # Orientation bias
    azimuth = solar_azimuth(dt)
    if orientation == "east":
        # East-facing gets more morning sun
        orient_factor = 1.0 + 0.1 * math.cos(math.radians(azimuth - 120))
    elif orientation == "west":
        # West-facing gets more afternoon sun
        orient_factor = 1.0 + 0.1 * math.cos(math.radians(azimuth - 240))
    else:
        orient_factor = 1.0
    # Cloud effect (correlated across all inverters)
    cloud = get_cloud_factor(dt)
    # Temperature derating
    temp = outdoor_temperature(dt)
    # Panel temperature is ~20-30C above ambient when producing
    panel_temp = temp + 20 + 10 * clear_sky_factor
    temp_derate = 1.0 + (-0.004) * max(0, panel_temp - 25)  # -0.4%/C above 25C
    temp_derate = max(0.75, temp_derate)
    # Per-inverter variation. BUG FIX: use a stable CRC instead of hash() —
    # str hashes are salted per process (PYTHONHASHSEED), which broke the
    # module's "deterministic when given a seed" guarantee across runs.
    if device_code:
        inv_hash = zlib.crc32(device_code.encode()) % 1000 / 1000.0
        inv_variation = 0.97 + 0.06 * inv_hash  # 0.97 to 1.03
    else:
        inv_variation = 1.0
    # Gaussian noise (1-3%)
    noise = 1.0 + _gauss(0, 0.015)
    # Final power
    power = (rated_power * clear_sky_factor * tilt_factor * orient_factor *
             cloud * temp_derate * ramp_factor * inv_variation * noise)
    # Inverter clipping
    power = min(power, rated_power)
    power = max(0.0, power)
    return round(power, 2)
def get_pv_orientation(device_code: str) -> str:
    """Map an inverter code to its roof orientation.

    INV-01, INV-02 are east building, INV-03 is west building; any other
    code defaults to south.
    """
    orientation_by_code = {
        "INV-01": "east",
        "INV-02": "east",
        "INV-03": "west",
    }
    return orientation_by_code.get(device_code, "south")
# ---------------------------------------------------------------------------
# Heat pump model
# ---------------------------------------------------------------------------
def get_hvac_mode(month: int) -> str:
    """Determine HVAC operating mode by month.

    Nov-Mar: heating; Jun-Sep: cooling; Apr-May: spring transition;
    October (and anything unmatched) falls through to the fall transition.
    """
    heating_months = {11, 12, 1, 2, 3}
    cooling_months = {6, 7, 8, 9}
    if month in heating_months:
        return "heating"
    if month in cooling_months:
        return "cooling"
    if month in (4, 5):
        return "transition_spring"
    return "transition_fall"
def heat_pump_data(dt: datetime, rated_power: float = 35.0,
                   device_code: str = "") -> dict:
    """Generate realistic heat pump operating data.

    COP tracks outdoor temperature; the load profile depends on the
    seasonal HVAC mode (heating peaks on cold mornings, cooling peaks on
    hot afternoons, transition months run intermittently). Flow rate is
    correlated with power rather than drawn independently.

    Args:
        dt: UTC datetime.
        rated_power: Unit rated electrical power in kW.
        device_code: Used for a stable per-unit power offset.

    Returns dict with: power, cop, inlet_temp, outlet_temp, flow_rate, outdoor_temp, mode
    """
    beijing_dt = dt + timedelta(hours=BEIJING_TZ_OFFSET) if dt.tzinfo else dt
    hour = beijing_dt.hour + beijing_dt.minute / 60.0
    month = beijing_dt.month
    is_weekend = beijing_dt.weekday() >= 5
    mode = get_hvac_mode(month)
    out_temp = outdoor_temperature(dt)
    # COP model: varies with outdoor temperature
    cop = 3.0 + 0.05 * (out_temp - 7)
    cop = max(2.0, min(5.5, cop))
    cop += _gauss(0, 0.1)
    cop = max(2.0, min(5.5, cop))
    # Operating pattern depends on mode
    if mode == "heating":
        # Higher demand at night/morning (cold), lower during warmest part of day
        if 6 <= hour <= 9:
            load_factor = _uniform(0.75, 0.95)
        elif 9 <= hour <= 16:
            load_factor = _uniform(0.45, 0.65)
        elif 16 <= hour <= 22:
            load_factor = _uniform(0.65, 0.85)
        else:  # night
            load_factor = _uniform(0.55, 0.75)
        if is_weekend:
            load_factor *= 0.7
        inlet_temp = 35 + _gauss(0, 1.5)  # return water
        delta_t = _uniform(5, 8)
        outlet_temp = inlet_temp + delta_t
    elif mode == "cooling":
        # Higher demand in afternoon (hot)
        if 8 <= hour <= 11:
            load_factor = _uniform(0.45, 0.65)
        elif 11 <= hour <= 16:
            load_factor = _uniform(0.75, 0.95)
        elif 16 <= hour <= 19:
            load_factor = _uniform(0.60, 0.80)
        elif 19 <= hour <= 22:
            load_factor = _uniform(0.35, 0.55)
        else:
            load_factor = _uniform(0.15, 0.30)
        if is_weekend:
            load_factor *= 0.7
        inlet_temp = 12 + _gauss(0, 1.0)  # return water (chilled)
        delta_t = _uniform(3, 5)
        outlet_temp = inlet_temp - delta_t
    else:  # transition
        # Intermittent operation
        if _random() < 0.4:
            # Off period
            return {
                "power": 0.0, "cop": 0.0,
                "inlet_temp": round(out_temp + _gauss(5, 1), 1),
                "outlet_temp": round(out_temp + _gauss(5, 1), 1),
                "flow_rate": 0.0, "outdoor_temp": round(out_temp, 1),
                "mode": "standby",
            }
        load_factor = _uniform(0.25, 0.55)
        # Could be either heating or cooling depending on temp
        if out_temp < 15:
            inlet_temp = 32 + _gauss(0, 1.5)
            delta_t = _uniform(4, 6)
            outlet_temp = inlet_temp + delta_t
        else:
            inlet_temp = 14 + _gauss(0, 1.0)
            delta_t = _uniform(3, 4)
            outlet_temp = inlet_temp - delta_t
    power = rated_power * load_factor
    power += _gauss(0, power * 0.02)  # noise
    power = max(0, min(rated_power, power))
    # Flow rate correlates with power (not random!)
    # Higher power -> higher flow for heat transfer
    flow_rate = 8 + (power / rated_power) * 7  # 8-15 m3/h range
    flow_rate += _gauss(0, 0.3)
    flow_rate = max(5, min(18, flow_rate))
    # Per-unit variation. BUG FIX: use a stable CRC instead of hash() — str
    # hashes are salted per process, breaking seeded reproducibility.
    if device_code:
        unit_offset = (zlib.crc32(device_code.encode()) % 100 - 50) / 500.0  # +/- 10%
        power *= (1 + unit_offset)
    return {
        "power": round(max(0, power), 2),
        "cop": round(cop, 2),
        "inlet_temp": round(inlet_temp, 1),
        "outlet_temp": round(outlet_temp, 1),
        "flow_rate": round(flow_rate, 1),
        "outdoor_temp": round(out_temp, 1),
        "mode": mode,
    }
# ---------------------------------------------------------------------------
# Building load (meter) model
# ---------------------------------------------------------------------------
def building_load(dt: datetime, base_power: float = 50.0,
                  meter_code: str = "") -> dict:
    """Generate realistic building electrical load.

    Weekday profile has a morning ramp, lunch dip, afternoon peak, and
    evening ramp-down; weekends run a reduced flat profile. A seasonal
    HVAC contribution and occasional load spikes (elevator, kitchen,
    EV charging) are added on top.

    Args:
        dt: UTC datetime (naive datetimes are used as-is, assumed Beijing local).
        base_power: Nominal full-load building power in kW.
        meter_code: Sub-meter code; METER-HP / METER-PUMP report a scaled
            share of the building load.

    Returns dict with: power, voltage, current, power_factor
    """
    beijing_dt = dt + timedelta(hours=BEIJING_TZ_OFFSET) if dt.tzinfo else dt
    hour = beijing_dt.hour + beijing_dt.minute / 60.0
    month = beijing_dt.month
    is_weekend = beijing_dt.weekday() >= 5
    # Base load profile
    if is_weekend:
        # Weekend: much lower, no office activity
        if 8 <= hour <= 18:
            load_factor = _uniform(0.35, 0.50)
        else:
            load_factor = _uniform(0.25, 0.35)
    else:
        # Weekday office pattern
        if hour < 6:
            load_factor = _uniform(0.25, 0.35)  # night minimum (security, servers)
        elif 6 <= hour < 8:
            # Morning ramp-up
            ramp = (hour - 6) / 2.0
            load_factor = _uniform(0.35, 0.50) + ramp * 0.3
        elif 8 <= hour < 12:
            load_factor = _uniform(0.75, 0.95)  # morning work
        elif 12 <= hour < 13:
            load_factor = _uniform(0.55, 0.70)  # lunch dip
        elif 13 <= hour < 18:
            load_factor = _uniform(0.80, 1.0)  # afternoon peak
        elif 18 <= hour < 19:
            # Evening ramp-down
            ramp = (19 - hour)
            load_factor = _uniform(0.50, 0.65) + ramp * 0.2
        elif 19 <= hour < 22:
            load_factor = _uniform(0.35, 0.50)  # evening
        else:
            load_factor = _uniform(0.25, 0.35)  # night
    # HVAC seasonal contribution
    hvac_mode = get_hvac_mode(month)
    if hvac_mode == "heating":
        hvac_add = _uniform(0.10, 0.20)
    elif hvac_mode == "cooling":
        hvac_add = _uniform(0.15, 0.25)
    else:
        hvac_add = _uniform(0.03, 0.08)
    # Random load events (elevator, kitchen, EV charging)
    spike = 0.0
    if _random() < 0.08:  # ~8% chance per reading
        spike = _uniform(5, 25)  # kW spike
    power = base_power * (load_factor + hvac_add) + spike
    # Minimum night base load (security, servers, emergency lighting)
    min_load = 15 + _gauss(0, 1)
    power = max(min_load, power)
    # Noise
    power += _gauss(0, power * 0.015)
    power = max(0, power)
    # Voltage (realistic grid: 220V +/- 5%)
    voltage = 220 + _gauss(0, 2.0)
    voltage = max(209, min(231, voltage))
    # Power factor
    pf = _uniform(0.88, 0.96)
    if 8 <= hour <= 18 and not is_weekend:
        pf = _uniform(0.90, 0.97)  # better during office hours (capacitor bank)
    # Per-meter variation — applied BEFORE deriving current so the reported
    # current stays consistent with the reported power.
    # (BUG FIX: previously current was derived from the un-scaled building
    # power, so sub-meters reported a current far too high for their power.)
    if meter_code == "METER-GRID":
        pass  # main meter, use as-is
    elif meter_code == "METER-PV":
        # PV meter shows generation, not load — handled separately
        pass
    elif meter_code == "METER-HP":
        power *= _uniform(0.2, 0.35)  # heat pump subset of total
    elif meter_code == "METER-PUMP":
        power *= _uniform(0.05, 0.12)  # circulation pumps
    # Current derived from the final power (3-phase: P = sqrt(3)*V*I*pf)
    current = power / (voltage * math.sqrt(3) * pf / 1000)
    return {
        "power": round(power, 2),
        "voltage": round(voltage, 1),
        "current": round(current, 1),
        "power_factor": round(pf, 3),
    }
# ---------------------------------------------------------------------------
# Sensor model
# ---------------------------------------------------------------------------
def indoor_sensor(dt: datetime, is_outdoor: bool = False,
                  device_code: str = "") -> dict:
    """Generate realistic temperature and humidity sensor data.

    Outdoor sensors follow the weather model directly. Indoor sensors are
    HVAC-controlled during weekday office hours (07:00-19:00) and drift
    toward the outdoor temperature otherwise.

    Args:
        dt: UTC datetime.
        is_outdoor: True for outdoor-mounted sensors.
        device_code: Used for a stable per-room temperature offset.

    Returns dict with: temperature, humidity
    """
    beijing_dt = dt + timedelta(hours=BEIJING_TZ_OFFSET) if dt.tzinfo else dt
    hour = beijing_dt.hour + beijing_dt.minute / 60.0
    month = beijing_dt.month
    is_weekend = beijing_dt.weekday() >= 5
    if is_outdoor:
        temp = outdoor_temperature(dt)
        hum = outdoor_humidity(dt)
        return {"temperature": round(temp, 1), "humidity": round(hum, 1)}
    # Indoor: HVAC controlled during office hours
    hvac_mode = get_hvac_mode(month)
    if not is_weekend and 7 <= hour <= 19:
        # HVAC on: well-controlled
        if hvac_mode == "heating":
            temp = _uniform(20.5, 23.5)
        elif hvac_mode == "cooling":
            temp = _uniform(23.0, 25.5)
        else:
            temp = _uniform(21.0, 25.0)
        hum = _uniform(40, 55)
    else:
        # HVAC off or weekend: drifts toward outdoor
        out_temp = outdoor_temperature(dt)
        if hvac_mode == "heating":
            # Indoor cools slowly without heating
            temp = max(16, min(22, 22 - (22 - out_temp) * 0.15))
        elif hvac_mode == "cooling":
            # Indoor warms slowly without cooling
            temp = min(30, max(24, 24 + (out_temp - 24) * 0.15))
        else:
            temp = 20 + (out_temp - 15) * 0.2
        hum = _uniform(35, 65)
        # Summer monsoon: higher indoor humidity without dehumidification
        if month in (7, 8) and is_weekend:
            hum = _uniform(55, 75)
    # Per-sensor variation (different rooms have slightly different temps).
    # BUG FIX: use a stable CRC instead of hash() — str hashes are salted
    # per process, breaking seeded reproducibility across runs.
    if device_code:
        room_offset = (zlib.crc32(device_code.encode()) % 100 - 50) / 100.0  # +/- 0.5C
        temp += room_offset
    temp += _gauss(0, 0.2)
    hum += _gauss(0, 1.5)
    return {
        "temperature": round(temp, 1),
        "humidity": round(max(15, min(95, hum)), 1),
    }
# ---------------------------------------------------------------------------
# Heat meter model
# ---------------------------------------------------------------------------
def heat_meter_data(dt: datetime, hp_power: float = 0, hp_cop: float = 3.0) -> dict:
    """Generate heat meter readings correlated with heat pump operation.

    Args:
        hp_power: Total heat pump electrical power (sum of all units) in kW
        hp_cop: Average COP of operating heat pumps

    Returns dict with: heat_power, flow_rate, supply_temp, return_temp
    """
    # Heat output = electrical input * COP * efficiency loss
    heat_power = hp_power * hp_cop * _uniform(0.88, 0.95)
    if heat_power < 1:
        # Plant idle: no measurable flow; pipe temperatures sit a few
        # degrees above ambient.
        return {
            "heat_power": 0.0,
            "flow_rate": 0.0,
            "supply_temp": round(outdoor_temperature(dt) + _gauss(5, 1), 1),
            "return_temp": round(outdoor_temperature(dt) + _gauss(5, 1), 1),
        }
    beijing_dt = dt + timedelta(hours=BEIJING_TZ_OFFSET) if dt.tzinfo else dt
    mode = get_hvac_mode(beijing_dt.month)
    # Supply/return setpoints depend on the seasonal HVAC mode.
    if mode == "heating":
        supply_temp = 42 + _gauss(0, 1.5)
        return_temp = supply_temp - _uniform(5, 8)
    elif mode == "cooling":
        supply_temp = 7 + _gauss(0, 0.8)
        return_temp = supply_temp + _uniform(3, 5)
    else:
        supply_temp = 30 + _gauss(0, 2)
        return_temp = supply_temp - _uniform(3, 5)
    # Flow rate derived from heat and delta-T
    delta_t = abs(supply_temp - return_temp)
    if delta_t > 0.5:
        # Q = m * cp * dT => m = Q / (cp * dT)
        # cp of water ~4.186 kJ/kgK, 1 m3 = 1000 kg
        flow_rate = heat_power / (4.186 * delta_t) * 3.6 # m3/h
    else:
        # Degenerate delta-T: fall back to a plausible random flow.
        flow_rate = _uniform(5, 10)
    flow_rate += _gauss(0, 0.2)
    return {
        "heat_power": round(max(0, heat_power), 2),
        "flow_rate": round(max(0, flow_rate), 1),
        "supply_temp": round(supply_temp, 1),
        "return_temp": round(return_temp, 1),
    }
# ---------------------------------------------------------------------------
# Communication glitch model
# ---------------------------------------------------------------------------
def should_skip_reading(cycle_count: int = 0) -> bool:
    """Randomly drop ~1% of readings to mimic communication glitches.

    The cycle_count argument is accepted for caller symmetry but is not
    used by the current probabilistic model.
    """
    glitch_probability = 0.01
    return _random() < glitch_probability
def should_go_offline() -> bool:
    """Randomly start a brief simulated offline event.

    ~0.1% chance per cycle — roughly once every few hours at 15 s intervals.
    """
    offline_probability = 0.001
    return _random() < offline_probability
# ---------------------------------------------------------------------------
# PV electrical details
# ---------------------------------------------------------------------------
def pv_electrical(power: float, rated_power: float = 110.0) -> dict:
    """Derive DC/AC voltages and inverter temperature for a PV power level.

    Uses the current wall-clock time for the ambient-temperature term, so
    this is meant for live simulation; see pv_electrical_at for backfill.
    """
    if power <= 0:
        # Idle inverter: DC bus down, grid AC still present,
        # enclosure at roughly ambient temperature.
        return {
            "dc_voltage": 0.0,
            "ac_voltage": round(220 + _gauss(0, 1), 1),
            "temperature": round(outdoor_temperature(datetime.now(timezone.utc)) + _gauss(0, 2), 1),
        }
    load_ratio = power / rated_power
    # MPPT window 200-850 V; operating point rises with load.
    bus_v = 450 + 200 * load_ratio + _gauss(0, 15)
    bus_v = max(200, min(850, bus_v))
    # Grid-tied AC side is very stable.
    grid_v = 220 + _gauss(0, 1.5)
    # Heatsink temperature: ambient plus load-dependent rise.
    sink_t = outdoor_temperature(datetime.now(timezone.utc)) + 15 + 20 * load_ratio
    sink_t += _gauss(0, 1.5)
    return {
        "dc_voltage": round(bus_v, 1),
        "ac_voltage": round(grid_v, 1),
        "temperature": round(sink_t, 1),
    }
def pv_electrical_at(power: float, dt: datetime, rated_power: float = 110.0) -> dict:
    """Generate PV electrical measurements for a specific time (backfill).

    Same model as pv_electrical but uses the supplied timestamp for the
    ambient-temperature term instead of the current wall clock.
    """
    if power <= 0:
        # Idle inverter: DC bus down, grid AC present, enclosure ~ambient.
        return {
            "dc_voltage": 0.0,
            "ac_voltage": round(220 + _gauss(0, 1), 1),
            "temperature": round(outdoor_temperature(dt) + _gauss(0, 2), 1),
        }
    load_ratio = power / rated_power
    bus_v = 450 + 200 * load_ratio + _gauss(0, 15)
    bus_v = max(200, min(850, bus_v))
    grid_v = 220 + _gauss(0, 1.5)
    sink_t = outdoor_temperature(dt) + 15 + 20 * load_ratio + _gauss(0, 1.5)
    return {
        "dc_voltage": round(bus_v, 1),
        "ac_voltage": round(grid_v, 1),
        "temperature": round(sink_t, 1),
    }

View File

@@ -0,0 +1,229 @@
"""气象数据融合服务 - 天气API集成、模拟数据生成、缓存"""
import logging
import math
from datetime import datetime, timedelta, timezone
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, and_, desc
from app.models.weather import WeatherData, WeatherConfig
from app.services.weather_model import (
outdoor_temperature, outdoor_humidity, solar_altitude,
get_cloud_factor, BEIJING_TZ_OFFSET, MONTHLY_AVG_TEMP,
)
logger = logging.getLogger("weather_service")
BJT = timezone(timedelta(hours=8))
def generate_mock_weather(dt: datetime) -> dict:
    """Generate mock weather data based on weather_model patterns.

    Args:
        dt: Timestamp to synthesize weather for (aware datetimes are
            shifted to Beijing time for the wind model; naive ones are
            used as-is).

    Returns:
        dict with temperature (C), humidity (%), solar_radiation (W/m2),
        cloud_cover (%) and wind_speed (m/s), rounded to one decimal.
    """
    # Keep the helper call order (temp, humidity, altitude, cloud) identical
    # to the original model in case the helpers share internal state.
    temp = outdoor_temperature(dt)
    humidity = outdoor_humidity(dt)
    altitude = solar_altitude(dt)
    if altitude > 0:
        cloud = get_cloud_factor(dt)
        # Clear-sky irradiance ~ 1000 W/m2 * sin(altitude), scaled by clouds.
        radiation = max(0, 1000 * math.sin(math.radians(altitude)) * cloud * 0.85)
        cover = (1 - cloud) * 100
    else:
        # Night: no irradiance; the model reports zero cover as well.
        radiation = 0
        cover = 0
    local_dt = dt + timedelta(hours=BEIJING_TZ_OFFSET) if dt.tzinfo else dt
    # Monthly baseline wind speed (m/s) — spring is windier in Beijing.
    monthly_wind = {1: 2.5, 2: 3.0, 3: 4.0, 4: 4.5, 5: 3.5, 6: 2.5,
                    7: 2.0, 8: 2.0, 9: 2.5, 10: 3.0, 11: 3.0, 12: 2.5}
    baseline = monthly_wind.get(local_dt.month, 2.5)
    # Diurnal component: windier through the afternoon, calmer at night.
    hr = local_dt.hour
    if 6 <= hr <= 18:
        diurnal = 0.5 * math.sin(math.pi * (hr - 6) / 12)
    else:
        diurnal = -0.3
    return {
        "temperature": round(temp, 1),
        "humidity": round(humidity, 1),
        "solar_radiation": round(radiation, 1),
        "cloud_cover": round(max(0, min(100, cover)), 1),
        "wind_speed": round(max(0.1, baseline + diurnal), 1),
    }
async def get_current_weather(db: AsyncSession) -> dict:
    """Get current weather - from cache or generate mock.

    Serves the most recent cached observation if one was fetched within
    the last 15 minutes; otherwise synthesizes a mock observation, stages
    it on the session (caller is responsible for committing), and returns it.
    """
    now = datetime.now(timezone.utc)
    stmt = (
        select(WeatherData)
        .where(
            and_(
                WeatherData.data_type == "observation",
                WeatherData.fetched_at >= now - timedelta(minutes=15),
            )
        )
        .order_by(desc(WeatherData.fetched_at))
        .limit(1)
    )
    hit = (await db.execute(stmt)).scalar_one_or_none()
    if hit is not None:
        return {
            "timestamp": str(hit.timestamp),
            "temperature": hit.temperature,
            "humidity": hit.humidity,
            "solar_radiation": hit.solar_radiation,
            "cloud_cover": hit.cloud_cover,
            "wind_speed": hit.wind_speed,
            "source": hit.source,
        }
    # Cache miss: synthesize an observation and stage it for persistence.
    mock = generate_mock_weather(now)
    db.add(WeatherData(
        timestamp=now,
        data_type="observation",
        temperature=mock["temperature"],
        humidity=mock["humidity"],
        solar_radiation=mock["solar_radiation"],
        cloud_cover=mock["cloud_cover"],
        wind_speed=mock["wind_speed"],
        source="mock",
    ))
    return {
        "timestamp": str(now),
        **mock,
        "source": "mock",
    }
async def get_forecast(db: AsyncSession, hours: int = 72) -> list[dict]:
    """Get weather forecast for the next N hours.

    Produces one synthetic forecast point every 3 hours over the horizon.

    Args:
        db: Database session (unused; forecasts are generated, not cached).
        hours: Forecast horizon in hours.
    """
    base = datetime.now(timezone.utc)
    return [
        {
            "timestamp": str(base + timedelta(hours=offset)),
            "hours_ahead": offset,
            **generate_mock_weather(base + timedelta(hours=offset)),
        }
        for offset in range(0, hours, 3)
    ]
async def get_weather_history(
    db: AsyncSession, start_date: datetime, end_date: datetime,
) -> list[dict]:
    """Get historical weather data.

    Returns cached records for the [start_date, end_date] window when any
    exist; otherwise falls back to hourly synthesized mock records.
    """
    stmt = (
        select(WeatherData)
        .where(
            and_(
                WeatherData.timestamp >= start_date,
                WeatherData.timestamp <= end_date,
            )
        )
        .order_by(WeatherData.timestamp)
    )
    rows = (await db.execute(stmt)).scalars().all()
    if rows:
        return [
            {
                "timestamp": str(row.timestamp),
                "temperature": row.temperature,
                "humidity": row.humidity,
                "solar_radiation": row.solar_radiation,
                "cloud_cover": row.cloud_cover,
                "wind_speed": row.wind_speed,
                "source": row.source,
            }
            for row in rows
        ]
    # Nothing cached: synthesize one mock record per hour across the window.
    synthesized = []
    cursor = start_date
    while cursor <= end_date:
        synthesized.append(
            {"timestamp": str(cursor), **generate_mock_weather(cursor), "source": "mock"}
        )
        cursor += timedelta(hours=1)
    return synthesized
async def get_weather_impact(db: AsyncSession, days: int = 30) -> dict:
    """Analyze weather impact on energy consumption and PV generation.

    NOTE(review): the figures returned here are illustrative/static rather
    than computed from stored data; ``db`` is accepted for interface
    symmetry with the other service functions but is not queried.
    (The original also computed an unused ``now - timedelta(days=days)``
    window start; that dead code has been removed.)

    Args:
        db: Database session (currently unused, see note above).
        days: Analysis window length, echoed back to the caller.

    Returns:
        dict with per-temperature-band consumption/generation figures, a
        solar-radiation vs PV-output correlation curve, and key findings.
    """
    # Representative consumption/generation per outdoor-temperature band.
    temp_ranges = [
        {"range": "< 0C", "min": -10, "max": 0, "avg_consumption": 850, "pv_generation": 180},
        {"range": "0-10C", "min": 0, "max": 10, "avg_consumption": 720, "pv_generation": 220},
        {"range": "10-20C", "min": 10, "max": 20, "avg_consumption": 550, "pv_generation": 310},
        {"range": "20-30C", "min": 20, "max": 30, "avg_consumption": 680, "pv_generation": 380},
        {"range": "> 30C", "min": 30, "max": 40, "avg_consumption": 780, "pv_generation": 350},
    ]
    # PV output roughly proportional to irradiance: 330 kWp * 85% efficiency.
    solar_correlation = [
        {"solar_radiation": rad, "pv_output_kw": round(rad * 0.33 * 0.85, 1)}
        for rad in range(0, 1001, 100)
    ]
    return {
        "analysis_period_days": days,
        "temperature_impact": temp_ranges,
        "solar_correlation": solar_correlation,
        "key_findings": [
            "采暖季(11-3月)温度每降低1C,热泵能耗增加约3%",
            "太阳辐射与光伏产出呈强正相关(R2=0.92)",
            "多云天气光伏产出下降30-50%",
            "春季大风天气对能耗影响较小,但对光伏面板散热有利",
        ],
    }
async def get_weather_config(db: AsyncSession) -> dict:
    """Get weather API configuration.

    Returns the stored configuration row if one exists, otherwise the
    built-in mock defaults (Beijing coordinates, 30-minute fetch interval).
    """
    row = (await db.execute(select(WeatherConfig).limit(1))).scalar_one_or_none()
    if row is None:
        # No row yet: fall back to the built-in mock defaults.
        return {
            "api_provider": "mock",
            "location_lat": 39.9,
            "location_lon": 116.4,
            "fetch_interval_minutes": 30,
            "is_enabled": True,
        }
    return {
        "id": row.id,
        "api_provider": row.api_provider,
        "location_lat": row.location_lat,
        "location_lon": row.location_lon,
        "fetch_interval_minutes": row.fetch_interval_minutes,
        "is_enabled": row.is_enabled,
    }
async def update_weather_config(db: AsyncSession, data: dict) -> dict:
    """Update weather API configuration.

    Creates the singleton config row if missing, then copies over only the
    whitelisted fields present in ``data``. The caller is responsible for
    committing the session.
    """
    existing = (await db.execute(select(WeatherConfig).limit(1))).scalar_one_or_none()
    if existing is None:
        existing = WeatherConfig()
        db.add(existing)
    # Whitelist prevents arbitrary attribute injection from the payload.
    allowed_fields = (
        "api_provider", "api_key", "location_lat", "location_lon",
        "fetch_interval_minutes", "is_enabled",
    )
    for field in allowed_fields:
        if field in data:
            setattr(existing, field, data[field])
    return {"message": "气象配置更新成功"}