from fastapi import APIRouter
from app.core.config import get_settings

router = APIRouter(prefix="/branding", tags=["品牌配置"])


@router.get("")
async def get_branding():
    """Return the branding configuration for the active customer.

    Values come from the per-customer YAML config; each key falls back to a
    platform-wide default when the customer config omits it.
    """
    settings = get_settings()
    cfg = settings.load_customer_config()

    # key -> fallback used when the customer config does not provide it
    defaults = {
        "customer_name": settings.CUSTOMER,
        "platform_name": settings.APP_NAME,
        "platform_name_en": "Smart EMS",
        "logo_url": "",
        "theme_color": "#1890ff",
        "features": {},
    }

    branding = {"customer": settings.CUSTOMER}
    for key, fallback in defaults.items():
        branding[key] = cfg.get(key, fallback)
    return branding
def get_enabled_collectors() -> dict[str, type[BaseCollector]]:
    """Return the collector registry filtered by the customer config.

    When the customer config provides a 'collectors' list, only those
    protocols are enabled (unknown names are logged and dropped). Without
    such a list, the full registry is returned unchanged.
    """
    requested = get_settings().load_customer_config().get("collectors")
    if requested is None:
        return COLLECTOR_REGISTRY

    for proto in requested:
        if proto not in COLLECTOR_REGISTRY:
            logger.warning("Customer config references unknown collector '%s', skipping", proto)

    return {proto: COLLECTOR_REGISTRY[proto] for proto in requested if proto in COLLECTOR_REGISTRY}
class SungrowCollector(BaseCollector):
    """Collect data from Sungrow inverters via the iSolarCloud OpenAPI.

    connection_params example:
    {
        "api_base": "https://gateway.isolarcloud.com",
        "app_key": "...",
        "sys_code": "901",
        "x_access_key": "...",
        "user_account": "...",
        "user_password": "...",
        "ps_id": "power_station_id",
        "device_sn": "optional_device_serial"
    }
    """

    # Refresh ahead of the 24 h token expiry enforced by the server.
    TOKEN_LIFETIME = 23 * 3600

    def __init__(self, device_id, device_code, connection_params, collect_interval=900):
        super().__init__(device_id, device_code, connection_params, collect_interval)
        self._api_base = connection_params.get("api_base", "https://gateway.isolarcloud.com").rstrip("/")
        self._app_key = connection_params.get("app_key", "")
        self._sys_code = connection_params.get("sys_code", "901")
        self._x_access_key = connection_params.get("x_access_key", "")
        self._user_account = connection_params.get("user_account", "")
        self._user_password = connection_params.get("user_password", "")
        self._ps_id = connection_params.get("ps_id", "")
        self._device_sn = connection_params.get("device_sn", "")
        self._client: Optional[httpx.AsyncClient] = None
        self._token: Optional[str] = None
        self._token_obtained_at: float = 0

    async def connect(self):
        """Establish the HTTP client and authenticate with iSolarCloud."""
        self._client = httpx.AsyncClient(timeout=30)
        await self._login()
        self.logger.info("Authenticated with iSolarCloud for %s", self.device_code)

    async def disconnect(self):
        """Close the HTTP client and drop the cached token."""
        if self._client:
            await self._client.aclose()
            self._client = None
        self._token = None

    async def collect(self) -> dict:
        """Collect real-time data from the Sungrow inverter.

        Returns:
            dict mapping data_type -> (value, unit). Device-level readings
            override station-level readings for the same key, since they are
            more specific.

        Raises:
            ConnectionError: if connect() was not called first.
        """
        if not self._client:
            raise ConnectionError("HTTP client not initialized")

        # Refresh token if close to expiry
        if self._token_needs_refresh():
            await self._login()

        data: dict = {}
        # Fix: the original duplicated this guard around both fetches.
        if self._ps_id:
            data.update(await self._get_station_data())
            data.update(await self._get_device_data())
        return data

    # ------------------------------------------------------------------
    # Internal API methods
    # ------------------------------------------------------------------

    async def _login(self):
        """POST /openapi/login to obtain an access token.

        Raises:
            ConnectionError: when the response contains no token.
        """
        payload = {
            "appkey": self._app_key,
            "sys_code": self._sys_code,
            "user_account": self._user_account,
            "user_password": self._user_password,
        }
        result = await self._api_call("/openapi/login", payload, auth=False)

        token = result.get("token")
        if not token:
            raise ConnectionError(f"Login failed: {result.get('msg', 'no token returned')}")

        self._token = token
        self._token_obtained_at = time.monotonic()
        self.logger.info("iSolarCloud login successful for account %s", self._user_account)

    async def _get_station_data(self) -> dict:
        """Fetch power-station real-time data (power / daily / total energy)."""
        result = await self._api_call("/openapi/getPowerStationList", {"ps_id": self._ps_id})

        data = {}
        for station in result.get("pageList", []):
            if str(station.get("ps_id")) != str(self._ps_id):
                continue
            if "curr_power" in station:
                data["power"] = (float(station["curr_power"]), "kW")
            if "today_energy" in station:
                data["daily_energy"] = (float(station["today_energy"]), "kWh")
            if "total_energy" in station:
                data["total_energy"] = (float(station["total_energy"]), "kWh")
            break
        return data

    async def _get_device_data(self) -> dict:
        """Fetch device-level real-time data for the target inverter.

        Selects exactly one device: the one matching ``device_sn`` when
        configured, otherwise the first inverter (device_type 1) in the list.
        (Fix: the original folded every device in the station — including
        non-inverters — letting later devices silently overwrite earlier
        readings, contrary to its documented "first inverter" behavior.)
        """
        result = await self._api_call("/openapi/getDeviceList", {"ps_id": self._ps_id})

        target = None
        for device in result.get("pageList", []):
            if self._device_sn:
                if device.get("device_sn") == self._device_sn:
                    target = device
                    break
            elif device.get("device_type") in (1, "1"):
                # No serial configured: use the first inverter found.
                target = device
                break
        if target is None:
            return {}

        # API field -> (our data_type, unit)
        field_map = {
            "device_power": ("power", "kW"),
            "today_energy": ("daily_energy", "kWh"),
            "total_energy": ("total_energy", "kWh"),
            "temperature": ("temperature", "°C"),
            "dc_voltage": ("voltage", "V"),
            "ac_current": ("current", "A"),
            "frequency": ("frequency", "Hz"),
        }
        return {
            data_type: (float(target[src]), unit)
            for src, (data_type, unit) in field_map.items()
            if src in target
        }

    async def _api_call(self, path: str, payload: dict, auth: bool = True) -> dict:
        """Make an API call to iSolarCloud.

        Args:
            path: API endpoint path (e.g. /openapi/login).
            payload: Request body parameters.
            auth: Whether to include the auth token.

        Returns:
            The 'result_data' dict from the response.

        Raises:
            RuntimeError: when the API reports a non-success result_code.
            httpx.HTTPStatusError: on an HTTP-level error status.
        """
        url = f"{self._api_base}{path}"
        headers = {
            "Content-Type": "application/json",
            "x-access-key": self._x_access_key,
            "sys_code": self._sys_code,
        }
        if auth and self._token:
            headers["token"] = self._token

        body = {
            "appkey": self._app_key,
            "lang": "_zh_CN",
            **payload,
        }

        self.logger.debug("API call: %s %s", "POST", url)
        response = await self._client.post(url, json=body, headers=headers)
        response.raise_for_status()

        resp_json = response.json()
        # The API reports success as result_code 1 (sometimes as a string).
        result_code = resp_json.get("result_code", -1)
        if result_code != 1 and str(result_code) != "1":
            msg = resp_json.get("result_msg", "Unknown error")
            self.logger.error("API error on %s: code=%s msg=%s", path, result_code, msg)
            raise RuntimeError(f"iSolarCloud API error: {msg} (code={result_code})")

        return resp_json.get("result_data", {})

    def _token_needs_refresh(self) -> bool:
        """Return True when there is no token or it is close to expiry."""
        if not self._token:
            return True
        elapsed = time.monotonic() - self._token_obtained_at
        return elapsed >= self.TOKEN_LIFETIME
# Examples: @@ -50,6 +56,19 @@ class Settings(BaseSettings): def is_sqlite(self) -> bool: return "sqlite" in self.DATABASE_URL + @property + def customer_config_path(self) -> str: + return os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), + "..", "customers", self.CUSTOMER) + + def load_customer_config(self) -> dict: + """Load customer-specific config from customers/{CUSTOMER}/config.yaml""" + config_file = os.path.join(self.customer_config_path, "config.yaml") + if os.path.exists(config_file): + with open(config_file, 'r', encoding='utf-8') as f: + return yaml.safe_load(f) or {} + return {} + class Config: env_file = ".env" extra = "ignore" diff --git a/backend/app/main.py b/backend/app/main.py index 6df4a6f..db6a6ab 100644 --- a/backend/app/main.py +++ b/backend/app/main.py @@ -17,6 +17,7 @@ from app.collectors.manager import CollectorManager from app.collectors.queue import IngestionWorker settings = get_settings() +customer_config = settings.load_customer_config() simulator = DataSimulator() collector_manager: Optional[CollectorManager] = None ingestion_worker: Optional[IngestionWorker] = None @@ -28,6 +29,9 @@ logger = logging.getLogger("app") async def lifespan(app: FastAPI): global collector_manager, ingestion_worker + logger.info("Loading customer: %s (%s)", settings.CUSTOMER, + customer_config.get("customer_name", settings.CUSTOMER)) + # Initialize Redis cache if settings.REDIS_ENABLED: redis = await get_redis() @@ -80,15 +84,19 @@ async def lifespan(app: FastAPI): app = FastAPI( - title="天普零碳园区智慧能源管理平台", - description="Tianpu Zero-Carbon Park Smart Energy Management System", + title=customer_config.get("platform_name", "天普零碳园区智慧能源管理平台"), + description=customer_config.get("platform_name_en", "Tianpu Zero-Carbon Park Smart Energy Management System"), version="1.0.0", lifespan=lifespan, ) +_default_origins = ["http://localhost:3000", "http://localhost:5173", "http://127.0.0.1:3000", "http://127.0.0.1:5173"] +_customer_origins = 
customer_config.get("cors_origins", []) +_cors_origins = list(set(_default_origins + _customer_origins)) + app.add_middleware( CORSMiddleware, - allow_origins=["http://localhost:3000", "http://localhost:5173", "http://127.0.0.1:3000", "http://127.0.0.1:5173"], + allow_origins=_cors_origins, allow_credentials=True, allow_methods=["*"], allow_headers=["*"], diff --git a/backend/requirements.txt b/backend/requirements.txt index 4d084b5..2793ded 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -23,3 +23,4 @@ pytest==8.3.4 pytest-asyncio==0.25.0 pytest-cov==6.0.0 aiosqlite==0.20.0 +pyyaml>=6.0 diff --git a/customers/tianpu/config.yaml b/customers/tianpu/config.yaml new file mode 100644 index 0000000..b5bfbdb --- /dev/null +++ b/customers/tianpu/config.yaml @@ -0,0 +1,17 @@ +# 天普大兴园区 - 客户配置 +customer_name: "天普新能源" +platform_name: "天普零碳园区智慧能源管理平台" +platform_name_en: "Tianpu Zero-Carbon Park Smart EMS" +logo_url: "/static/logo-tianpu.png" +theme_color: "#1890ff" +cors_origins: + - "http://localhost:3000" + - "http://localhost:5173" +collectors: + - modbus_tcp + - mqtt + - http_api +features: + charging: true + carbon: true + bigscreen_3d: true diff --git a/customers/zpark/config.yaml b/customers/zpark/config.yaml new file mode 100644 index 0000000..0e87146 --- /dev/null +++ b/customers/zpark/config.yaml @@ -0,0 +1,15 @@ +# 中关村医疗器械园 - 客户配置 +customer_name: "中关村医疗器械园" +platform_name: "中关村医疗器械园智慧能源管理平台" +platform_name_en: "Z-Park Medical Device Smart EMS" +logo_url: "/static/logo-zpark.png" +theme_color: "#52c41a" +cors_origins: + - "http://localhost:3000" + - "http://localhost:5173" +collectors: + - sungrow_api +features: + charging: false + carbon: true + bigscreen_3d: false diff --git a/customers/zpark/devices.json b/customers/zpark/devices.json new file mode 100644 index 0000000..9702291 --- /dev/null +++ b/customers/zpark/devices.json @@ -0,0 +1,338 @@ +{ + "customer": { + "name": "中关村医疗器械园", + "code": "zpark", + "location": "北京市海淀区" + }, + 
"device_types": [ + { + "code": "sungrow_inverter", + "name": "阳光电源组串式逆变器", + "icon": "solar-panel", + "data_fields": ["power", "daily_energy", "total_energy", "voltage", "current", "frequency", "temperature"] + }, + { + "code": "dc_combiner", + "name": "直流汇流箱", + "icon": "combiner-box", + "data_fields": ["voltage", "current", "power", "string_current"] + }, + { + "code": "pv_panel_group", + "name": "光伏组件组", + "icon": "pv-panel", + "data_fields": ["power", "energy"] + } + ], + "device_groups": [ + { + "name": "中关村医疗器械园", + "location": "北京市海淀区", + "description": "中关村医疗器械园光伏项目总节点", + "children": [ + { + "name": "一期-26号楼", + "location": "26号楼屋顶", + "description": "一期项目,26号楼屋顶光伏" + }, + { + "name": "二期-69号", + "location": "69号区域", + "description": "二期项目,69号区域多栋楼屋顶光伏", + "children": [ + {"name": "1#楼", "location": "1#楼屋顶"}, + {"name": "2#楼", "location": "2#楼屋顶"}, + {"name": "4#楼", "location": "4#楼屋顶"}, + {"name": "5#楼", "location": "5#楼屋顶"}, + {"name": "7#楼", "location": "7#楼屋顶"}, + {"name": "12#楼", "location": "12#楼屋顶"} + ] + } + ] + } + ], + "devices": [ + { + "name": "AP101组串式逆变器", + "code": "ZP-INV-AP101", + "device_type": "sungrow_inverter", + "group": "一期-26号楼", + "rated_power": 40, + "model": "SG40KTL-M", + "manufacturer": "阳光电源", + "protocol": "http_api", + "collect_interval": 900, + "connection_params": { + "api_base": "https://gateway.isolarcloud.com", + "app_key": "1BF313B6A9F919A6FB6A90BD43D23395", + "sys_code": "901", + "x_access_key": "qpthtsf287zvtmr6t3q9hsc0k70f3tay", + "user_account": "13911211695", + "user_password": "123456#ABC", + "ps_id": "", + "device_sn": "AP101" + } + }, + { + "name": "AP102组串式逆变器", + "code": "ZP-INV-AP102", + "device_type": "sungrow_inverter", + "group": "一期-26号楼", + "rated_power": 50, + "model": "SG50KTL-M", + "manufacturer": "阳光电源", + "protocol": "http_api", + "collect_interval": 900, + "connection_params": { + "api_base": "https://gateway.isolarcloud.com", + "app_key": "1BF313B6A9F919A6FB6A90BD43D23395", + "sys_code": "901", 
+ "x_access_key": "qpthtsf287zvtmr6t3q9hsc0k70f3tay", + "user_account": "13911211695", + "user_password": "123456#ABC", + "ps_id": "", + "device_sn": "AP102" + } + }, + { + "name": "AP201组串式逆变器", + "code": "ZP-INV-AP201", + "device_type": "sungrow_inverter", + "group": "1#楼", + "rated_power": 130, + "model": "SG125HV", + "manufacturer": "阳光电源", + "protocol": "http_api", + "collect_interval": 900, + "connection_params": { + "api_base": "https://gateway.isolarcloud.com", + "app_key": "1BF313B6A9F919A6FB6A90BD43D23395", + "sys_code": "901", + "x_access_key": "qpthtsf287zvtmr6t3q9hsc0k70f3tay", + "user_account": "13911211695", + "user_password": "123456#ABC", + "ps_id": "", + "device_sn": "AP201" + } + }, + { + "name": "AP202组串式逆变器", + "code": "ZP-INV-AP202", + "device_type": "sungrow_inverter", + "group": "2#楼", + "rated_power": 260, + "model": "SG250HX", + "manufacturer": "阳光电源", + "protocol": "http_api", + "collect_interval": 900, + "connection_params": { + "api_base": "https://gateway.isolarcloud.com", + "app_key": "1BF313B6A9F919A6FB6A90BD43D23395", + "sys_code": "901", + "x_access_key": "qpthtsf287zvtmr6t3q9hsc0k70f3tay", + "user_account": "13911211695", + "user_password": "123456#ABC", + "ps_id": "", + "device_sn": "AP202" + } + }, + { + "name": "AP203组串式逆变器", + "code": "ZP-INV-AP203", + "device_type": "sungrow_inverter", + "group": "4#楼", + "rated_power": 160, + "model": "SG160HX", + "manufacturer": "阳光电源", + "protocol": "http_api", + "collect_interval": 900, + "connection_params": { + "api_base": "https://gateway.isolarcloud.com", + "app_key": "1BF313B6A9F919A6FB6A90BD43D23395", + "sys_code": "901", + "x_access_key": "qpthtsf287zvtmr6t3q9hsc0k70f3tay", + "user_account": "13911211695", + "user_password": "123456#ABC", + "ps_id": "", + "device_sn": "AP203" + } + }, + { + "name": "AP204组串式逆变器", + "code": "ZP-INV-AP204", + "device_type": "sungrow_inverter", + "group": "5#楼", + "rated_power": 400, + "model": "SG350HX", + "manufacturer": "阳光电源", + "protocol": 
"http_api", + "collect_interval": 900, + "connection_params": { + "api_base": "https://gateway.isolarcloud.com", + "app_key": "1BF313B6A9F919A6FB6A90BD43D23395", + "sys_code": "901", + "x_access_key": "qpthtsf287zvtmr6t3q9hsc0k70f3tay", + "user_account": "13911211695", + "user_password": "123456#ABC", + "ps_id": "", + "device_sn": "AP204" + } + }, + { + "name": "AP205组串式逆变器", + "code": "ZP-INV-AP205", + "device_type": "sungrow_inverter", + "group": "7#楼", + "rated_power": 290, + "model": "SG250HX", + "manufacturer": "阳光电源", + "protocol": "http_api", + "collect_interval": 900, + "connection_params": { + "api_base": "https://gateway.isolarcloud.com", + "app_key": "1BF313B6A9F919A6FB6A90BD43D23395", + "sys_code": "901", + "x_access_key": "qpthtsf287zvtmr6t3q9hsc0k70f3tay", + "user_account": "13911211695", + "user_password": "123456#ABC", + "ps_id": "", + "device_sn": "AP205" + } + }, + { + "name": "AP206组串式逆变器", + "code": "ZP-INV-AP206", + "device_type": "sungrow_inverter", + "group": "7#楼", + "rated_power": 300, + "model": "SG300HX", + "manufacturer": "阳光电源", + "protocol": "http_api", + "collect_interval": 900, + "connection_params": { + "api_base": "https://gateway.isolarcloud.com", + "app_key": "1BF313B6A9F919A6FB6A90BD43D23395", + "sys_code": "901", + "x_access_key": "qpthtsf287zvtmr6t3q9hsc0k70f3tay", + "user_account": "13911211695", + "user_password": "123456#ABC", + "ps_id": "", + "device_sn": "AP206" + } + }, + { + "name": "AP207组串式逆变器", + "code": "ZP-INV-AP207", + "device_type": "sungrow_inverter", + "group": "12#楼", + "rated_power": 280, + "model": "SG250HX", + "manufacturer": "阳光电源", + "protocol": "http_api", + "collect_interval": 900, + "connection_params": { + "api_base": "https://gateway.isolarcloud.com", + "app_key": "1BF313B6A9F919A6FB6A90BD43D23395", + "sys_code": "901", + "x_access_key": "qpthtsf287zvtmr6t3q9hsc0k70f3tay", + "user_account": "13911211695", + "user_password": "123456#ABC", + "ps_id": "", + "device_sn": "AP207" + } + }, + { + "name": 
"AP208组串式逆变器", + "code": "ZP-INV-AP208", + "device_type": "sungrow_inverter", + "group": "12#楼", + "rated_power": 290, + "model": "SG250HX", + "manufacturer": "阳光电源", + "protocol": "http_api", + "collect_interval": 900, + "connection_params": { + "api_base": "https://gateway.isolarcloud.com", + "app_key": "1BF313B6A9F919A6FB6A90BD43D23395", + "sys_code": "901", + "x_access_key": "qpthtsf287zvtmr6t3q9hsc0k70f3tay", + "user_account": "13911211695", + "user_password": "123456#ABC", + "ps_id": "", + "device_sn": "AP208" + } + }, + { + "name": "26号楼1#汇流箱", + "code": "ZP-CB-2601", + "device_type": "dc_combiner", + "group": "一期-26号楼", + "rated_power": 20, + "model": "PVS-16M", + "manufacturer": "阳光电源" + }, + { + "name": "26号楼2#汇流箱", + "code": "ZP-CB-2602", + "device_type": "dc_combiner", + "group": "一期-26号楼", + "rated_power": 20, + "model": "PVS-16M", + "manufacturer": "阳光电源" + }, + { + "name": "1#楼1#汇流箱", + "code": "ZP-CB-0101", + "device_type": "dc_combiner", + "group": "1#楼", + "rated_power": 30, + "model": "PVS-24M", + "manufacturer": "阳光电源" + }, + { + "name": "2#楼1#汇流箱", + "code": "ZP-CB-0201", + "device_type": "dc_combiner", + "group": "2#楼", + "rated_power": 50, + "model": "PVS-24M", + "manufacturer": "阳光电源" + }, + { + "name": "5#楼1#汇流箱", + "code": "ZP-CB-0501", + "device_type": "dc_combiner", + "group": "5#楼", + "rated_power": 60, + "model": "PVS-24M", + "manufacturer": "阳光电源" + }, + { + "name": "7#楼1#汇流箱", + "code": "ZP-CB-0701", + "device_type": "dc_combiner", + "group": "7#楼", + "rated_power": 50, + "model": "PVS-24M", + "manufacturer": "阳光电源" + }, + { + "name": "12#楼1#汇流箱", + "code": "ZP-CB-1201", + "device_type": "dc_combiner", + "group": "12#楼", + "rated_power": 50, + "model": "PVS-24M", + "manufacturer": "阳光电源" + }, + { + "name": "12#楼2#汇流箱", + "code": "ZP-CB-1202", + "device_type": "dc_combiner", + "group": "12#楼", + "rated_power": 50, + "model": "PVS-24M", + "manufacturer": "阳光电源" + } + ] +} diff --git a/customers/zpark/pricing.json 
b/customers/zpark/pricing.json new file mode 100644 index 0000000..d59c9bb --- /dev/null +++ b/customers/zpark/pricing.json @@ -0,0 +1,13 @@ +{ + "name": "2026年北京工商业分时电价", + "energy_type": "electricity", + "pricing_type": "tou", + "periods": [ + {"name": "peak", "start": "10:00", "end": "15:00", "price": 1.35}, + {"name": "peak", "start": "18:00", "end": "21:00", "price": 1.35}, + {"name": "flat", "start": "07:00", "end": "10:00", "price": 0.85}, + {"name": "flat", "start": "15:00", "end": "18:00", "price": 0.85}, + {"name": "valley", "start": "23:00", "end": "07:00", "price": 0.35}, + {"name": "shoulder", "start": "21:00", "end": "23:00", "price": 0.95} + ] +} diff --git a/frontend/package.json b/frontend/package.json index 1a97f80..b23556e 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -1,7 +1,7 @@ { "name": "frontend", "private": true, - "version": "0.0.0", + "version": "1.0.0", "type": "module", "scripts": { "dev": "vite", diff --git a/scripts/gitea-migration/01_export_mac_studio.sh b/scripts/gitea-migration/01_export_mac_studio.sh new file mode 100644 index 0000000..20225fb --- /dev/null +++ b/scripts/gitea-migration/01_export_mac_studio.sh @@ -0,0 +1,32 @@ +#!/bin/bash +# Run this ON Mac Studio (SSH into 100.108.180.60 first) +# Usage: ssh duwenbo@100.108.180.60 'bash -s' < 01_export_mac_studio.sh + +echo "=== Exporting Gitea from Mac Studio ===" + +# Find Gitea container name +CONTAINER=$(docker ps --filter "ancestor=gitea/gitea" --format "{{.Names}}" | head -1) +if [ -z "$CONTAINER" ]; then + CONTAINER=$(docker ps --format "{{.Names}}" | grep -i gitea | head -1) +fi +echo "Gitea container: $CONTAINER" + +# Create backup directory +mkdir -p ~/gitea-backup +cd ~/gitea-backup + +# Method 1: Try gitea dump +echo "Attempting gitea dump..." 
+docker exec $CONTAINER gitea dump -c /data/gitea/conf/app.ini -f /tmp/gitea-dump.zip 2>/dev/null +docker cp $CONTAINER:/tmp/gitea-dump.zip ./gitea-dump.zip 2>/dev/null + +# Method 2: Copy data volume directly +echo "Copying Gitea data volume..." +docker cp $CONTAINER:/data/gitea ./gitea-data + +echo "=== Export complete ===" +echo "Files in ~/gitea-backup/:" +ls -la ~/gitea-backup/ +echo "" +echo "Next step: Transfer to labmac3:" +echo " scp -r ~/gitea-backup duwenbo@192.168.1.77:/opt/" diff --git a/scripts/gitea-migration/02_setup_labmac3.sh b/scripts/gitea-migration/02_setup_labmac3.sh new file mode 100644 index 0000000..68ba19a --- /dev/null +++ b/scripts/gitea-migration/02_setup_labmac3.sh @@ -0,0 +1,44 @@ +#!/bin/bash +# Run this ON labmac3 (SSH into 192.168.1.77 first) +# Usage: ssh duwenbo@192.168.1.77 'bash -s' < 02_setup_labmac3.sh + +echo "=== Setting up Gitea on labmac3 ===" + +# Create directories +sudo mkdir -p /opt/gitea/data +sudo chown -R $(whoami):$(id -gn) /opt/gitea + +# Create docker-compose.yml +cat > /opt/gitea/docker-compose.yml << 'COMPOSE' +version: '3' +services: + gitea: + image: gitea/gitea:latest + container_name: gitea + environment: + - USER_UID=1000 + - USER_GID=1000 + - GITEA__database__DB_TYPE=sqlite3 + - GITEA__server__ROOT_URL=http://192.168.1.77:3300/ + - GITEA__server__HTTP_PORT=3000 + ports: + - "3300:3000" + - "2222:22" + volumes: + - ./data:/data + restart: unless-stopped +COMPOSE + +echo "=== Docker Compose file created ===" +cat /opt/gitea/docker-compose.yml + +echo "" +echo "Next steps:" +echo "1. If restoring from Mac Studio backup:" +echo " cp -r /opt/gitea-backup/gitea-data/* /opt/gitea/data/" +echo "" +echo "2. Start Gitea:" +echo " cd /opt/gitea && docker compose up -d" +echo "" +echo "3. 
#!/bin/bash
# Run this ON labmac3 after transferring backup
# Usage: ssh duwenbo@192.168.1.77 'bash -s' < 03_restore_data.sh

echo "=== Restoring Gitea data on labmac3 ==="

# Abort if the deploy dir is missing — every command below assumes this cwd.
cd /opt/gitea || { echo "ERROR: /opt/gitea not found (run 02_setup_labmac3.sh first)"; exit 1; }

# Stop Gitea if running
docker compose down 2>/dev/null

# Restore data from backup
if [ -d "/opt/gitea-backup/gitea-data" ]; then
    echo "Restoring from data volume backup..."
    cp -r /opt/gitea-backup/gitea-data/* ./data/ 2>/dev/null
    # Fix the ROOT_URL in app.ini to point to new IP.
    # Use a backup suffix so `sed -i` works on both GNU sed (Linux) and
    # BSD sed (macOS — labmac3 is a Mac, where bare `sed -i` fails).
    if [ -f "./data/conf/app.ini" ]; then
        sed -i.bak 's|ROOT_URL.*=.*|ROOT_URL = http://192.168.1.77:3300/|' ./data/conf/app.ini
        sed -i.bak 's|SSH_DOMAIN.*=.*|SSH_DOMAIN = 192.168.1.77|' ./data/conf/app.ini
        rm -f ./data/conf/app.ini.bak
        echo "Updated ROOT_URL and SSH_DOMAIN in app.ini"
    fi
elif [ -f "/opt/gitea-backup/gitea-dump.zip" ]; then
    echo "Restoring from gitea dump..."
    unzip /opt/gitea-backup/gitea-dump.zip -d /tmp/gitea-restore
    # Copy repos and database
    cp -r /tmp/gitea-restore/repos/* ./data/gitea/repositories/ 2>/dev/null
    cp /tmp/gitea-restore/gitea-db.sql ./data/ 2>/dev/null
fi

# Fix permissions
sudo chown -R 1000:1000 ./data

# Start Gitea
docker compose up -d

echo "Waiting for Gitea to start..."
sleep 10

# Verify
echo "=== Verification ==="
curl -s http://localhost:3300/api/v1/version
echo ""
curl -s http://localhost:3300/api/v1/repos/search?limit=5 | python3 -c "
import sys,json
try:
    data=json.load(sys.stdin)
    repos = data.get('data', data) if isinstance(data, dict) else data
    print(f'Repos found: {len(repos)}')
    for r in repos[:5]:
        name = r.get('full_name', r.get('name', '?'))
        print(f'  {name}')
except: print('Could not parse response')
" 2>/dev/null

echo ""
echo "=== Gitea should now be accessible at: ==="
echo "  http://192.168.1.77:3300/"
echo ""
echo "If this is a fresh install (no backup), create admin:"
echo "  docker exec -it gitea gitea admin user create --admin --username tianpu --password 'TianpuGit2026!' --email admin@tianpu.com"
#!/bin/bash
# Run this on each developer's machine
# Usage: cd tianpu-ems && bash path/to/04_update_developer_remotes.sh

echo "=== Updating Git remotes to labmac3 ==="

NEW_URL="http://192.168.1.77:3300/tianpu/tianpu-ems.git"

OLD_ORIGIN=$(git remote get-url origin 2>/dev/null)
echo "Current origin: $OLD_ORIGIN"

# Update origin to labmac3
git remote set-url origin "$NEW_URL"

# Keep the old Mac Studio URL as a backup remote — but only when there was
# a previous origin AND it is not already the labmac3 URL. (Fix: re-running
# the original script overwrote 'mac-studio' with the new URL, destroying
# the backup pointer; an empty OLD_ORIGIN created a broken remote.)
if [ -n "$OLD_ORIGIN" ] && [ "$OLD_ORIGIN" != "$NEW_URL" ]; then
    git remote remove mac-studio 2>/dev/null
    git remote add mac-studio "$OLD_ORIGIN" 2>/dev/null
fi

NEW_ORIGIN=$(git remote get-url origin)
echo "New origin: $NEW_ORIGIN"

# Test connectivity
echo ""
echo "Testing connection..."
git ls-remote origin HEAD 2>&1 | head -3

echo ""
echo "=== Done ==="
echo "You can now: git pull origin main"
+curl -s -X POST "$GITEA_URL/api/v1/orgs/tianpu/repos" \ + -H "Authorization: token $TOKEN" \ + -H "Content-Type: application/json" \ + -d '{ + "name": "tp-ems", + "description": "天普大兴园区EMS - 客户定制项目", + "private": false, + "default_branch": "main" + }' | python -c "import sys,json; d=json.load(sys.stdin); print(f' Created: {d.get(\"full_name\",\"ERROR\")}') if 'id' in d else print(f' {d.get(\"message\",d)}')" + +# Create zpark-ems repo +echo "" +echo "Creating zpark-ems repo..." +curl -s -X POST "$GITEA_URL/api/v1/orgs/tianpu/repos" \ + -H "Authorization: token $TOKEN" \ + -H "Content-Type: application/json" \ + -d '{ + "name": "zpark-ems", + "description": "中关村医疗器械园EMS - 客户定制项目", + "private": false, + "default_branch": "main" + }' | python -c "import sys,json; d=json.load(sys.stdin); print(f' Created: {d.get(\"full_name\",\"ERROR\")}') if 'id' in d else print(f' {d.get(\"message\",d)}')" + +# Add all developers as collaborators to all repos +echo "" +echo "Adding collaborators..." +for REPO in ems-core tp-ems zpark-ems; do + for USER in duwenbo hanbing zhangshiyue wangliwei yangruixiao; do + curl -s -o /dev/null -X PUT "$GITEA_URL/api/v1/repos/tianpu/$REPO/collaborators/$USER" \ + -H "Authorization: token $TOKEN" \ + -H "Content-Type: application/json" \ + -d '{"permission": "write"}' + done + echo " $REPO: all 5 developers added" +done + +echo "" +echo "=== All 3 repos created ===" +echo " $GITEA_URL/tianpu/ems-core" +echo " $GITEA_URL/tianpu/tp-ems" +echo " $GITEA_URL/tianpu/zpark-ems" diff --git a/scripts/gitea-migration/README.md b/scripts/gitea-migration/README.md new file mode 100644 index 0000000..352cbe0 --- /dev/null +++ b/scripts/gitea-migration/README.md @@ -0,0 +1,15 @@ +# Gitea 迁移指南:Mac Studio → labmac3 + +## 执行顺序 + +1. `01_export_mac_studio.sh` — 在Mac Studio上导出Gitea数据 +2. 手动传输:`scp -r ~/gitea-backup duwenbo@192.168.1.77:/opt/` +3. `02_setup_labmac3.sh` — 在labmac3上部署Gitea容器 +4. `03_restore_data.sh` — 恢复数据并验证 +5. 
"""Seed data for Z-Park (Zhongguancun Medical Device Park): PV devices,
alarm rules, carbon emission factor and electricity pricing."""
import asyncio
import json
import os
import sys

sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "backend"))

from sqlalchemy import select
from app.core.database import async_session, engine
from app.models.device import Device, DeviceType, DeviceGroup
from app.models.alarm import AlarmRule
from app.models.carbon import EmissionFactor
from app.models.pricing import ElectricityPricing, PricingPeriod


# Customer-specific configuration files.
DEVICES_JSON = os.path.join(os.path.dirname(__file__), "..", "customers", "zpark", "devices.json")
PRICING_JSON = os.path.join(os.path.dirname(__file__), "..", "customers", "zpark", "pricing.json")


async def _seed_device_types(session, config) -> None:
    """Insert device types, skipping codes that already exist.

    Type codes may overlap with other customers' seed scripts (e.g. the
    tianpu seed), so each code is checked before insertion.
    """
    for dt in config["device_types"]:
        existing = await session.execute(
            select(DeviceType).where(DeviceType.code == dt["code"])
        )
        if existing.scalar_one_or_none() is None:
            session.add(DeviceType(
                code=dt["code"],
                name=dt["name"],
                icon=dt.get("icon"),
                data_fields=dt.get("data_fields"),
            ))
    await session.flush()


async def _seed_device_groups(session, config) -> dict:
    """Create the hierarchical device group tree.

    Returns a mapping of group name -> database id, used to attach
    devices to their groups.
    """
    name_to_id: dict = {}

    async def walk(groups, parent_id=None):
        for g in groups:
            grp = DeviceGroup(
                name=g["name"],
                parent_id=parent_id,
                location=g.get("location"),
                description=g.get("description"),
            )
            session.add(grp)
            # Flush so grp.id is populated before children reference it.
            await session.flush()
            name_to_id[g["name"]] = grp.id
            if "children" in g:
                await walk(g["children"], parent_id=grp.id)

    await walk(config["device_groups"])
    return name_to_id


async def _seed_devices(session, config, group_name_to_id: dict) -> int:
    """Insert all devices (initially offline); return the number created."""
    devices = [
        Device(
            name=d["name"],
            code=d["code"],
            device_type=d["device_type"],
            group_id=group_name_to_id.get(d.get("group")),
            model=d.get("model"),
            manufacturer=d.get("manufacturer"),
            rated_power=d.get("rated_power"),
            location=d.get("location", ""),
            protocol=d.get("protocol", "http_api"),
            connection_params=d.get("connection_params"),
            collect_interval=d.get("collect_interval", 900),
            status="offline",
            is_active=True,
        )
        for d in config["devices"]
    ]
    session.add_all(devices)
    await session.flush()
    return len(devices)


async def _seed_emission_factor(session) -> None:
    """Insert the North-China-grid PV displacement emission factor if missing."""
    existing = await session.execute(
        select(EmissionFactor).where(EmissionFactor.energy_type == "pv_generation")
    )
    if existing.scalar_one_or_none() is None:
        session.add(EmissionFactor(
            name="华北电网光伏减排因子",
            energy_type="pv_generation",
            factor=0.8843,
            unit="kWh",
            scope=2,
            region="north_china",
            source="等量替代电网电力",
            year=2023,
        ))
        await session.flush()


def _seed_alarm_rules(session) -> int:
    """Insert inverter monitoring alarm rules; return the number created."""
    alarm_rules = [
        AlarmRule(
            name="逆变器功率过低告警",
            device_type="sungrow_inverter",
            data_type="power",
            condition="lt",
            threshold=1.0,
            duration=1800,
            severity="warning",
            notify_channels=["app", "wechat"],
            is_active=True,
        ),
        AlarmRule(
            name="逆变器通信中断告警",
            device_type="sungrow_inverter",
            data_type="power",
            condition="eq",
            threshold=0.0,
            duration=3600,
            severity="critical",
            notify_channels=["app", "sms", "wechat"],
            is_active=True,
        ),
        AlarmRule(
            name="逆变器过温告警",
            device_type="sungrow_inverter",
            data_type="temperature",
            condition="gt",
            threshold=70.0,
            duration=120,
            severity="major",
            notify_channels=["app", "sms"],
            is_active=True,
        ),
    ]
    session.add_all(alarm_rules)
    return len(alarm_rules)


async def _seed_pricing(session) -> int:
    """Insert time-of-use electricity pricing from PRICING_JSON.

    Returns the number of pricing periods created, or 0 when the pricing
    config file does not exist.  (The original code referenced
    ``pricing_config`` in the summary output even when the file was
    absent, raising ``NameError``.)
    """
    if not os.path.exists(PRICING_JSON):
        return 0

    with open(PRICING_JSON, "r", encoding="utf-8") as f:
        pricing_config = json.load(f)

    pricing = ElectricityPricing(
        name=pricing_config["name"],
        energy_type=pricing_config.get("energy_type", "electricity"),
        pricing_type=pricing_config.get("pricing_type", "tou"),
        is_active=True,
    )
    session.add(pricing)
    # Flush so pricing.id is available for the period rows.
    await session.flush()

    periods = pricing_config.get("periods", [])
    for period in periods:
        session.add(PricingPeriod(
            pricing_id=pricing.id,
            period_name=period["name"],
            start_time=period["start"],
            end_time=period["end"],
            price_per_unit=period["price"],
        ))
    return len(periods)


async def seed():
    """Populate the database with Z-Park seed data.

    Loads device definitions from DEVICES_JSON (and pricing from
    PRICING_JSON if present), then inserts device types, the device
    group hierarchy, devices, the PV emission factor, inverter alarm
    rules and electricity pricing in one transaction, committing at the
    end and printing a summary.
    """
    with open(DEVICES_JSON, "r", encoding="utf-8") as f:
        config = json.load(f)

    async with async_session() as session:
        await _seed_device_types(session, config)
        group_name_to_id = await _seed_device_groups(session, config)
        device_count = await _seed_devices(session, config, group_name_to_id)
        await _seed_emission_factor(session)
        alarm_rule_count = _seed_alarm_rules(session)
        period_count = await _seed_pricing(session)

        await session.commit()
        print("Z-Park seed data created successfully!")

        # Summary of everything inserted in this run.
        print(f" - Device types: {len(config['device_types'])}")
        print(f" - Device groups: {len(group_name_to_id)}")
        print(f" - Devices: {device_count}")
        print(f" - Alarm rules: {alarm_rule_count}")
        print(f" - Pricing periods: {period_count}")


if __name__ == "__main__":
    asyncio.run(seed())