Compare commits

7 commits: 6810de2583...add-v2

| SHA1 |
|---|
| b35ff74cdb |
| 95e5e1438f |
| 364f9259b3 |
| 39a86ee2f9 |
| d073456c2f |
| 680335fba1 |
| 5cf970df62 |
@@ -1,2 +0,0 @@ (deleted file)
```diff
-fastapi
-uvicorn[standard]
```
@@ -1,11 +0,0 @@ (deleted file)
```diff
-from fastapi import FastAPI
-
-app = FastAPI(title="HanchuESS Solar Backend API")
-
-@app.get("/", tags=["Root"])
-def root():
-    return {"message": "Welcome to the HanchuESS Solar Backend API!"}
-
-if __name__ == "__main__":
-    import uvicorn
-    uvicorn.run(app, host="0.0.0.0", port=8050)
```
@@ -1,8 +0,0 @@ (deleted file)
```diff
-services:
-  hanchuess-solar-backend:
-    build:
-      context: backend
-      dockerfile: docker/Dockerfile
-    container_name: hanchuess-solar-backend
-    ports:
-      - "8050:8050"
```
v1/.env.example (new file, 24 lines)
@@ -0,0 +1,24 @@
```env
# HanchuESS API Configuration
# Copy this file to .env and fill in your actual values

# Required: AES encryption key (must be 16, 24, or 32 bytes)
HANCHU_AES_KEY=your_aes_key_here

# Required: AES initialization vector (must be 16 bytes)
HANCHU_AES_IV=your_aes_iv_here

# Required: Login URL for the HanchuESS API
HANCHU_LOGIN_URL=https://api.example.com/login

# Optional: Login type (default: ACCOUNT)
HANCHU_LOGIN_TYPE=ACCOUNT

# Optional: HTTP timeout in seconds (default: 10)
HANCHU_HTTP_TIMEOUT=10

# Optional: Verify SSL certificates (default: true, set to false for self-signed certs)
HANCHU_VERIFY_SSL=true

# Optional: Username and password
HANCHU_USERNAME=
HANCHU_PASSWORD=
```
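For orientation, a minimal sketch of how these variables are consumed at startup (the same pattern used in `v1/backend/src/service/hanchu_service.py` below; the variable names are the ones defined in this file):

```python
import os

from dotenv import load_dotenv  # python-dotenv, listed in requirements.txt

load_dotenv()  # reads .env from the working directory

# Required values use os.environ and raise KeyError if missing.
aes_key = os.environ["HANCHU_AES_KEY"].encode("utf-8")

# Optional values use os.getenv with the documented defaults.
timeout = int(os.getenv("HANCHU_HTTP_TIMEOUT", "10"))
verify_ssl = os.getenv("HANCHU_VERIFY_SSL", "true").lower() == "true"
```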
.gitignore → v1/.gitignore (vendored, 0 changes)
v1/backend/requirements.txt (new file, 5 lines)
@@ -0,0 +1,5 @@
```
fastapi
uvicorn[standard]
requests>=2.31.0
pycryptodome>=3.20.0
python-dotenv>=1.0.1
```
v1/backend/src/database/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
```python
# Database package
```
v1/backend/src/database/db.py (new file, 389 lines)
@@ -0,0 +1,389 @@
```python
"""
Database module for storing HanchuESS power readings.
Uses SQLite with thread-safe operations.
"""
import os
import sqlite3
from datetime import datetime
from typing import List, Dict, Optional
from contextlib import contextmanager


class PowerReadingsDB:
    """Database manager for power readings."""

    def __init__(self, db_path: str = None):
        """
        Initialize database connection.

        Args:
            db_path: Path to SQLite database file. If None, reads from SQLITE_DB_PATH env var.
        """
        if db_path is None:
            db_path = os.getenv("SQLITE_DB_PATH", "./hanchuess_solar.db")

        self.db_path = db_path
        self._init_database()

    @contextmanager
    def _get_connection(self):
        """Context manager for database connections."""
        conn = sqlite3.connect(self.db_path, check_same_thread=False)
        conn.row_factory = sqlite3.Row
        try:
            yield conn
            conn.commit()
        except Exception as e:
            conn.rollback()
            raise e
        finally:
            conn.close()

    def _init_database(self):
        """Create tables if they don't exist."""
        with self._get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS power_readings (
                    -- Primary key and timestamp
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    timestamp DATETIME NOT NULL,
                    data_source VARCHAR(20) DEFAULT 'realtime',  -- 'realtime' or 'backfill'

                    -- Device information
                    sn VARCHAR(50) NOT NULL,           -- Serial number (e.g., "H016164380126")
                    device_model VARCHAR(10),          -- devModel: Device model identifier
                    device_status INTEGER,             -- devStatus: Device status code

                    -- Solar/PV metrics (photovoltaic generation)
                    pv_total_power REAL,               -- pvTtPwr: Current PV generation in watts
                    pv_today_generation REAL,          -- pvTdGe: Today's total PV generation in kWh
                    pv_today_grid_export REAL,         -- pvDge: PV direct to grid today in kWh

                    -- Battery metrics
                    battery_power REAL,                -- batP: Current charge(+)/discharge(-) power in watts
                    battery_soc REAL,                  -- batSoc: State of charge percentage (0-100)
                    battery_today_charge REAL,         -- batTdChg: Today's charge energy in kWh
                    battery_today_discharge REAL,      -- batTdDschg: Today's discharge energy in kWh
                    battery_status INTEGER,            -- batSts: Battery status code
                    battery_design_capacity REAL,      -- bmsDesignCap: Design capacity in kWh

                    -- Load/consumption metrics
                    load_power REAL,                   -- loadPwr: Current total load in watts
                    load_eps_power REAL,               -- loadEpsPwr: EPS (backup) load power in watts
                    load_today_energy REAL,            -- loadTdEe: Today's consumption in kWh

                    -- Grid metrics
                    grid_today_feed REAL,              -- gridTdFe: Today's grid feed energy in kWh
                    grid_today_export REAL,            -- gridTdEe: Today's grid export energy in kWh
                    meter_power REAL,                  -- meterPPwr: Current meter power reading in watts
                    grid_power_sum REAL,               -- pwrGridSum: Grid power sum

                    -- System status and operating mode
                    work_mode INTEGER,                 -- workMode: Operating mode code
                    work_mode_combined INTEGER,        -- workModeCmb: Combined work mode
                    work_mode_flag INTEGER,            -- workModeFlag: Work mode flag
                    total_power INTEGER,               -- pPwr: Total system power in watts

                    -- Bypass meter (if installed)
                    bypass_meter_switch INTEGER,       -- bypMeterSwitch: Bypass meter on/off
                    bypass_meter_total_power REAL,     -- bypMeterTotalPower: Bypass meter power
                    bypass_install_direction INTEGER,  -- bypInstallDirection: Installation direction

                    -- Generator (if present)
                    dg_run_state INTEGER,              -- dgRunState: Generator running state
                    dg_grid_state INTEGER,             -- dgGridState: Generator grid connection state

                    -- Additional features
                    has_charger INTEGER,               -- hasCharger: Has EV charger
                    has_heat INTEGER,                  -- hasHeat: Has heating system
                    has_os INTEGER,                    -- hasOs: Has operating system feature
                    has_sg INTEGER                     -- hasSg: Has smart grid feature
                )
            """)

            # Create indexes for common queries
            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_timestamp
                ON power_readings(timestamp DESC)
            """)

            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_sn_timestamp
                ON power_readings(sn, timestamp DESC)
            """)

            # Create unique constraint to prevent duplicate timestamps per device
            cursor.execute("""
                CREATE UNIQUE INDEX IF NOT EXISTS idx_unique_sn_timestamp
                ON power_readings(sn, timestamp)
            """)

            conn.commit()

    def insert_reading(self, data: Dict, data_source: str = 'realtime') -> int:
        """
        Insert a power reading into the database.

        Args:
            data: Dictionary containing the API response data
            data_source: Source of data ('realtime' or 'backfill')

        Returns:
            The ID of the inserted row, or None if duplicate
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()

            # Extract nested data if present
            reading_data = data.get('data', data)

            try:
                cursor.execute("""
                    INSERT INTO power_readings (
                        timestamp, data_source, sn, device_model, device_status,
                        pv_total_power, pv_today_generation, pv_today_grid_export,
                        battery_power, battery_soc, battery_today_charge, battery_today_discharge,
                        battery_status, battery_design_capacity,
                        load_power, load_eps_power, load_today_energy,
                        grid_today_feed, grid_today_export, meter_power, grid_power_sum,
                        work_mode, work_mode_combined, work_mode_flag, total_power,
                        bypass_meter_switch, bypass_meter_total_power, bypass_install_direction,
                        dg_run_state, dg_grid_state,
                        has_charger, has_heat, has_os, has_sg
                    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                """, (
                    datetime.now().isoformat(),
                    data_source,
                    reading_data.get('sn'),
                    reading_data.get('devModel'),
                    reading_data.get('devStatus'),

                    self._to_float(reading_data.get('pvTtPwr')),
                    self._to_float(reading_data.get('pvTdGe')),
                    self._to_float(reading_data.get('pvDge')),

                    self._to_float(reading_data.get('batP')),
                    self._to_float(reading_data.get('batSoc')),
                    self._to_float(reading_data.get('batTdChg')),
                    self._to_float(reading_data.get('batTdDschg')),
                    reading_data.get('batSts'),
                    self._to_float(reading_data.get('bmsDesignCap')),

                    self._to_float(reading_data.get('loadPwr')),
                    self._to_float(reading_data.get('loadEpsPwr')),
                    self._to_float(reading_data.get('loadTdEe')),

                    self._to_float(reading_data.get('gridTdFe')),
                    self._to_float(reading_data.get('gridTdEe')),
                    self._to_float(reading_data.get('meterPPwr')),
                    self._to_float(reading_data.get('pwrGridSum')),

                    reading_data.get('workMode'),
                    reading_data.get('workModeCmb'),
                    reading_data.get('workModeFlag'),
                    reading_data.get('pPwr'),

                    reading_data.get('bypMeterSwitch'),
                    self._to_float(reading_data.get('bypMeterTotalPower')),
                    reading_data.get('bypInstallDirection'),

                    reading_data.get('dgRunState'),
                    reading_data.get('dgGridState'),

                    1 if reading_data.get('hasCharger') else 0,
                    1 if reading_data.get('hasHeat') else 0,
                    1 if reading_data.get('hasOs') else 0,
                    1 if reading_data.get('hasSg') else 0
                ))

                return cursor.lastrowid
            except Exception as e:
                # If it's a unique constraint violation, return None (duplicate)
                if "UNIQUE constraint failed" in str(e):
                    return None
                raise

    def get_readings(
        self,
        limit: int = 100,
        start_date: Optional[str] = None,
        end_date: Optional[str] = None,
        sn: Optional[str] = None
    ) -> List[Dict]:
        """
        Query power readings from the database.

        Args:
            limit: Maximum number of records to return
            start_date: Start date (ISO format) for filtering
            end_date: End date (ISO format) for filtering
            sn: Serial number to filter by

        Returns:
            List of reading dictionaries
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()

            query = "SELECT * FROM power_readings WHERE 1=1"
            params = []

            if sn:
                query += " AND sn = ?"
                params.append(sn)

            if start_date:
                query += " AND timestamp >= ?"
                params.append(start_date)

            if end_date:
                query += " AND timestamp <= ?"
                params.append(end_date)

            query += " ORDER BY timestamp DESC LIMIT ?"
            params.append(limit)

            cursor.execute(query, params)

            return [dict(row) for row in cursor.fetchall()]

    def get_latest_reading(self, sn: Optional[str] = None) -> Optional[Dict]:
        """
        Get the most recent power reading.

        Args:
            sn: Optional serial number to filter by

        Returns:
            Latest reading dictionary or None
        """
        readings = self.get_readings(limit=1, sn=sn)
        return readings[0] if readings else None

    def get_stats(self, sn: Optional[str] = None) -> Dict:
        """
        Get statistics about stored readings.

        Args:
            sn: Optional serial number to filter by

        Returns:
            Dictionary with count, first/last timestamps
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()

            query = """
                SELECT
                    COUNT(*) as count,
                    MIN(timestamp) as first_reading,
                    MAX(timestamp) as last_reading
                FROM power_readings
            """

            params = []
            if sn:
                query += " WHERE sn = ?"
                params.append(sn)

            cursor.execute(query, params)
            row = cursor.fetchone()

            return dict(row) if row else {}

    @staticmethod
    def _to_float(value) -> Optional[float]:
        """Convert string or numeric value to float, handling None."""
        if value is None or value == "":
            return None
        try:
            return float(value)
        except (ValueError, TypeError):
            return None

    def check_day_coverage(self, date: str, sn: Optional[str] = None) -> Dict:
        """
        Check what data exists for a specific day.

        Args:
            date: Date string in ISO format (e.g., "2026-01-13")
            sn: Optional serial number filter

        Returns:
            Dictionary with coverage information
        """
        from datetime import datetime

        # Parse date and get day boundaries
        day = datetime.fromisoformat(date.split('T')[0])
        start = day.replace(hour=0, minute=0, second=0, microsecond=0).isoformat()
        end = day.replace(hour=23, minute=59, second=59, microsecond=999999).isoformat()

        with self._get_connection() as conn:
            cursor = conn.cursor()

            query = """
                SELECT COUNT(*) as count,
                       MIN(timestamp) as first_reading,
                       MAX(timestamp) as last_reading
                FROM power_readings
                WHERE timestamp >= ? AND timestamp <= ?
            """
            params = [start, end]

            if sn:
                query += " AND sn = ?"
                params.append(sn)

            cursor.execute(query, params)
            row = cursor.fetchone()

            return {
                "date": date,
                "has_data": row['count'] > 0,
                "reading_count": row['count'],
                "first_reading": row['first_reading'],
                "last_reading": row['last_reading'],
                "expected_readings": 1440  # One per minute for full day
            }

    def insert_minute_reading(self, timestamp: str, data: Dict, sn: str, data_source: str = 'backfill') -> Optional[int]:
        """
        Insert a single minute reading from historical data.

        Args:
            timestamp: ISO format timestamp
            data: Data dictionary for that minute
            sn: Serial number
            data_source: 'realtime' or 'backfill'

        Returns:
            Row ID if inserted, None if duplicate
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()

            try:
                cursor.execute("""
                    INSERT INTO power_readings (
                        timestamp, data_source, sn,
                        pv_total_power, battery_power, battery_soc,
                        load_power, grid_power_sum
                    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                """, (
                    timestamp,
                    data_source,
                    sn,
                    self._to_float(data.get('pvTtPwr')),
                    self._to_float(data.get('batP')),
                    self._to_float(data.get('batSoc')),
                    self._to_float(data.get('loadPwr')),
                    self._to_float(data.get('pwrGridSum'))
                ))

                return cursor.lastrowid
            except Exception as e:
                if "UNIQUE constraint failed" in str(e):
                    return None
                raise
```
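As a quick illustration of the `PowerReadingsDB` API above, a hedged usage sketch (the payload keys match the `reading_data.get(...)` fields the insert expects; the values and the `/tmp` path are invented for the example):

```python
from database.db import PowerReadingsDB

db = PowerReadingsDB(db_path="/tmp/example.db")  # hypothetical path, for illustration

# insert_reading unwraps the nested 'data' envelope the API returns.
reading_id = db.insert_reading(
    {"data": {"sn": "H016164380126", "pvTtPwr": "1234.5", "batSoc": "87"}},
    data_source="realtime",
)
print(reading_id)  # row ID, or None if (sn, timestamp) already exists

latest = db.get_latest_reading(sn="H016164380126")
print(latest["battery_soc"] if latest else "no readings yet")
```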
v1/backend/src/main.py (new file, 417 lines)
@@ -0,0 +1,417 @@
```python
from fastapi import FastAPI
from pydantic import BaseModel
from contextlib import asynccontextmanager
from typing import Optional
import os
import sqlite3

from service.monitoring_service import get_monitoring_service
from service.backfill_service import get_backfill_service
from database.db import PowerReadingsDB


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Lifespan context manager for startup and shutdown events."""
    # Startup: Initialize database
    db = PowerReadingsDB()
    print(f"Database initialized at: {db.db_path}")

    yield

    # Shutdown: Stop monitoring if running
    monitoring = get_monitoring_service()
    if monitoring.is_running:
        await monitoring.stop()
        print("Monitoring service stopped")


app = FastAPI(title="HanchuESS Solar Backend API", lifespan=lifespan)


class DecryptRequest(BaseModel):
    encrypted_payload: str


@app.get("/", tags=["Root"])
def root():
    return {"message": "Welcome to the HanchuESS Solar Backend API!"}


@app.get("/test_sqlite_health", tags=["Database"])
def test_sqlite_health():
    """Test SQLite database health by creating a test table and inserting a record"""
    db_path = os.getenv("SQLITE_DB_PATH")
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()

    try:
        # Create test table
        cursor.execute("""
            CREATE TABLE IF NOT EXISTS test_health (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                test_value TEXT NOT NULL
            )
        """)

        # Insert test record
        cursor.execute("INSERT INTO test_health (test_value) VALUES (?)", ("health_check",))
        conn.commit()

        return {"status": "success", "message": "SQLite database is healthy."}
    except Exception as e:
        return {"status": "error", "message": str(e)}
    finally:
        cursor.close()
        conn.close()


@app.get("/get_access_token", tags=["HanchuESS"])
def get_access_token():
    """Get access token by logging into HanchuESS"""
    from service.hanchu_service import HanchuESSService

    hanchu_service = HanchuESSService()
    try:
        access_token = hanchu_service.get_access_token()
        return {"access_token": access_token}
    except Exception as e:
        return {"error": str(e)}


@app.post("/decrypt_payload", tags=["Payload"])
def decrypt_payload(request: DecryptRequest):
    """Decrypt an AES-encrypted HanchuESS payload"""
    from service.hanchu_service import HanchuESSService

    try:
        hanchu_service = HanchuESSService()
        decrypted_data = hanchu_service.decrypt_payload(request.encrypted_payload)

        return {
            "decrypted_data": decrypted_data,
            "data_type": type(decrypted_data).__name__
        }
    except Exception as e:
        import traceback
        return {"error": str(e), "traceback": traceback.format_exc()}


@app.get("/get_power_chart", tags=["HanchuESS"])
def get_power_chart():
    """Get 65-second power chart data from HanchuESS"""
    from service.hanchu_service import HanchuESSService

    try:
        hanchu_service = HanchuESSService()

        # Get power chart data (will automatically handle authentication)
        power_data = hanchu_service.get_power_chart()

        return power_data
    except Exception as e:
        import traceback
        return {"error": str(e), "traceback": traceback.format_exc()}


# ============================================================================
# MONITORING ENDPOINTS
# ============================================================================

@app.post("/monitoring/start", tags=["Monitoring"])
async def start_monitoring():
    """Start the 65-second monitoring service"""
    monitoring = get_monitoring_service()
    success = await monitoring.start()

    if success:
        return {
            "status": "started",
            "message": "Monitoring service started successfully",
            "poll_interval": monitoring.poll_interval
        }
    else:
        return {
            "status": "already_running",
            "message": "Monitoring service is already running"
        }


@app.post("/monitoring/stop", tags=["Monitoring"])
async def stop_monitoring():
    """Stop the monitoring service"""
    monitoring = get_monitoring_service()
    success = await monitoring.stop()

    if success:
        return {
            "status": "stopped",
            "message": "Monitoring service stopped successfully"
        }
    else:
        return {
            "status": "not_running",
            "message": "Monitoring service was not running"
        }


@app.get("/monitoring/status", tags=["Monitoring"])
def get_monitoring_status():
    """Get current monitoring service status"""
    monitoring = get_monitoring_service()
    return monitoring.get_status()


# ============================================================================
# DATABASE QUERY ENDPOINTS
# ============================================================================

@app.get("/readings", tags=["Database"])
def get_readings(
    limit: int = 100,
    start_date: Optional[str] = None,
    end_date: Optional[str] = None,
    sn: Optional[str] = None
):
    """
    Get stored power readings from the database.

    Args:
        limit: Maximum number of records (default: 100)
        start_date: Start date in ISO format (e.g., "2026-01-13T00:00:00")
        end_date: End date in ISO format
        sn: Serial number to filter by
    """
    try:
        db = PowerReadingsDB()
        readings = db.get_readings(
            limit=limit,
            start_date=start_date,
            end_date=end_date,
            sn=sn
        )
        return {
            "count": len(readings),
            "readings": readings
        }
    except Exception as e:
        import traceback
        return {"error": str(e), "traceback": traceback.format_exc()}


@app.get("/readings/latest", tags=["Database"])
def get_latest_reading(sn: Optional[str] = None):
    """Get the most recent power reading"""
    try:
        db = PowerReadingsDB()
        reading = db.get_latest_reading(sn=sn)

        if reading:
            return reading
        else:
            return {"message": "No readings found"}
    except Exception as e:
        import traceback
        return {"error": str(e), "traceback": traceback.format_exc()}


@app.get("/readings/stats", tags=["Database"])
def get_reading_stats(sn: Optional[str] = None):
    """Get statistics about stored readings"""
    try:
        db = PowerReadingsDB()
        stats = db.get_stats(sn=sn)
        return stats
    except Exception as e:
        import traceback
        return {"error": str(e), "traceback": traceback.format_exc()}


@app.get("/readings/gaps", tags=["Database"])
def find_data_gaps(
    threshold_seconds: int = 70,
    start_date: Optional[str] = None,
    end_date: Optional[str] = None,
    sn: Optional[str] = None
):
    """
    Find gaps in the stored power readings.

    A gap is detected when the time between consecutive readings exceeds the threshold.
    Since readings should occur every 65 seconds, a threshold of 70 seconds is reasonable.

    Args:
        threshold_seconds: Gap threshold in seconds (default: 70)
        start_date: Start date in ISO format (optional)
        end_date: End date in ISO format (optional)
        sn: Serial number to filter by (optional)

    Returns:
        List of gaps found with start/end times and duration
    """
    from datetime import datetime

    try:
        db = PowerReadingsDB()

        # Get readings ordered by timestamp
        readings = db.get_readings(
            limit=10000,  # Large limit to get comprehensive data
            start_date=start_date,
            end_date=end_date,
            sn=sn
        )

        if len(readings) < 2:
            return {
                "gaps_found": 0,
                "gaps": [],
                "message": "Need at least 2 readings to detect gaps"
            }

        # Reverse to get chronological order (oldest first)
        readings = list(reversed(readings))

        gaps = []

        for i in range(len(readings) - 1):
            current = readings[i]
            next_reading = readings[i + 1]

            # Parse timestamps
            current_time = datetime.fromisoformat(current['timestamp'])
            next_time = datetime.fromisoformat(next_reading['timestamp'])

            # Calculate time difference
            time_diff = (next_time - current_time).total_seconds()

            # If gap exceeds threshold, record it
            if time_diff > threshold_seconds:
                gap_duration_minutes = time_diff / 60
                missed_readings = int(time_diff / 65) - 1  # Estimate missed readings

                gaps.append({
                    "gap_start": current['timestamp'],
                    "gap_end": next_reading['timestamp'],
                    "duration_seconds": round(time_diff, 2),
                    "duration_minutes": round(gap_duration_minutes, 2),
                    "estimated_missed_readings": missed_readings,
                    "last_reading_before_gap_id": current['id'],
                    "first_reading_after_gap_id": next_reading['id']
                })

        # Calculate summary statistics
        total_gap_time = sum(gap['duration_seconds'] for gap in gaps)

        return {
            "gaps_found": len(gaps),
            "total_gap_duration_seconds": round(total_gap_time, 2),
            "total_gap_duration_hours": round(total_gap_time / 3600, 2),
            "threshold_seconds": threshold_seconds,
            "readings_analyzed": len(readings),
            "date_range": {
                "start": readings[0]['timestamp'],
                "end": readings[-1]['timestamp']
            },
            "gaps": gaps
        }

    except Exception as e:
        import traceback
        return {"error": str(e), "traceback": traceback.format_exc()}


@app.get("/get_day_data", tags=["HanchuESS"])
def get_day_data(date: str):
    """
    Get a full day's worth of minute-by-minute power data from HanchuESS.

    Args:
        date: Date in ISO format (e.g., "2026-01-13" or "2026-01-13T00:00:00")

    Returns:
        Minute-by-minute power data for the entire day
    """
    from service.hanchu_service import HanchuESSService
    from datetime import datetime, timezone, timedelta

    try:
        hanchu_service = HanchuESSService()

        # Parse the date string
        try:
            # Try parsing with time
            day = datetime.fromisoformat(date.replace('Z', '+00:00'))
        except ValueError:
            # Try parsing just date
            day = datetime.strptime(date, "%Y-%m-%d")

        # Set to UTC timezone if not already
        if day.tzinfo is None:
            day = day.replace(tzinfo=timezone.utc)

        # Get start of day (00:00:00)
        start_of_day = day.replace(hour=0, minute=0, second=0, microsecond=0)
        start_ts = int(start_of_day.timestamp() * 1000)

        # Get end of day (23:59:59.999)
        end_of_day = day.replace(hour=23, minute=59, second=59, microsecond=999000)
        end_ts = int(end_of_day.timestamp() * 1000)

        # Get the minute chart data for the full day
        chart_data = hanchu_service.get_power_minute_chart(start_ts=start_ts, end_ts=end_ts)

        return {
            "requested_date": date,
            "start_timestamp": start_ts,
            "end_timestamp": end_ts,
            "start_time": start_of_day.isoformat(),
            "end_time": end_of_day.isoformat(),
            "data": chart_data
        }
    except Exception as e:
        import traceback
        return {"error": str(e), "traceback": traceback.format_exc()}


# ============================================================================
# BACKFILL ENDPOINTS
# ============================================================================

@app.post("/backfill/start", tags=["Backfill"])
async def start_backfill(
    start_date: str,
    end_date: Optional[str] = None,
    delay_seconds: int = 2
):
    """
    Start backfilling historical data from start_date to end_date.

    Args:
        start_date: Start date in ISO format (e.g., "2025-07-02")
        end_date: End date in ISO format (defaults to today)
        delay_seconds: Delay between API calls for rate limiting (default: 2)

    Example:
        POST /backfill/start?start_date=2025-07-02&end_date=2026-01-22&delay_seconds=3
    """
    backfill = get_backfill_service()
    result = await backfill.start_backfill(
        start_date=start_date,
        end_date=end_date,
        delay_seconds=delay_seconds
    )
    return result


@app.post("/backfill/stop", tags=["Backfill"])
async def stop_backfill():
    """Stop the current backfill process."""
    backfill = get_backfill_service()
    result = await backfill.stop_backfill()
    return result


@app.get("/backfill/status", tags=["Backfill"])
def get_backfill_status():
    """
    Get current backfill status and progress.

    Returns:
        Status including days processed, records inserted, estimated completion time
    """
    backfill = get_backfill_service()
    return backfill.get_status()


if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8050)
```
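A hedged client-side sketch of exercising these endpoints with `requests` (assumes the server is running locally via the `__main__` block above, hence port 8050):

```python
import requests

BASE = "http://localhost:8050"  # port from uvicorn.run(..., port=8050)

# Start the 65-second poller, then inspect what has been stored.
print(requests.post(f"{BASE}/monitoring/start").json())
print(requests.get(f"{BASE}/readings/latest").json())
print(requests.get(f"{BASE}/readings/gaps", params={"threshold_seconds": 70}).json())
```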
v1/backend/src/service/backfill_service.py (new file, 306 lines)
@@ -0,0 +1,306 @@
```python
"""
Backfill service for historical data collection.
Fetches day-by-day minute data from HanchuESS API and fills database gaps.
"""
import asyncio
import logging
from typing import Optional, Dict, List
from datetime import datetime, timedelta, timezone

from service.hanchu_service import HanchuESSService
from database.db import PowerReadingsDB


logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class BackfillService:
    """Service for backfilling historical power data."""

    def __init__(self):
        """Initialize backfill service."""
        self.hanchu_service = HanchuESSService()
        self.db = PowerReadingsDB()
        self.is_running = False
        self.task: Optional[asyncio.Task] = None

        # Progress tracking
        self.status = {
            "is_running": False,
            "start_date": None,
            "end_date": None,
            "current_date": None,
            "days_total": 0,
            "days_completed": 0,
            "days_remaining": 0,
            "records_inserted": 0,
            "records_skipped": 0,
            "conflicts_found": 0,
            "errors": [],
            "started_at": None,
            "estimated_completion": None
        }

        # Configuration
        self.delay_between_days = 2  # seconds between API calls
        self.sn = self.hanchu_service.base_serial_number

    async def start_backfill(
        self,
        start_date: str,
        end_date: Optional[str] = None,
        delay_seconds: int = 2
    ) -> Dict:
        """
        Start backfilling data from start_date to end_date.

        Args:
            start_date: Start date in ISO format (e.g., "2025-07-02")
            end_date: End date in ISO format (defaults to today)
            delay_seconds: Delay between API calls for rate limiting

        Returns:
            Status dictionary
        """
        if self.is_running:
            return {"error": "Backfill already in progress"}

        # Parse dates
        start = datetime.fromisoformat(start_date.split('T')[0])
        if end_date:
            end = datetime.fromisoformat(end_date.split('T')[0])
        else:
            end = datetime.now(timezone.utc).replace(hour=0, minute=0, second=0, microsecond=0)

        # Calculate total days
        total_days = (end - start).days + 1

        # Initialize status
        self.status = {
            "is_running": True,
            "start_date": start.isoformat(),
            "end_date": end.isoformat(),
            "current_date": start.isoformat(),
            "days_total": total_days,
            "days_completed": 0,
            "days_remaining": total_days,
            "records_inserted": 0,
            "records_skipped": 0,
            "conflicts_found": 0,
            "errors": [],
            "started_at": datetime.now().isoformat(),
            "estimated_completion": None
        }

        self.delay_between_days = delay_seconds
        self.is_running = True

        # Start background task
        self.task = asyncio.create_task(
            self._backfill_loop(start, end)
        )

        logger.info(f"Backfill started: {start_date} to {end.isoformat()} ({total_days} days)")
        return self.status

    async def stop_backfill(self) -> Dict:
        """Stop the backfill process."""
        if not self.is_running:
            return {"error": "No backfill in progress"}

        self.is_running = False
        if self.task:
            self.task.cancel()
            try:
                await self.task
            except asyncio.CancelledError:
                pass

        logger.info("Backfill stopped")
        self.status["is_running"] = False
        return self.status

    def get_status(self) -> Dict:
        """Get current backfill status."""
        # Update estimated completion time
        if self.status["is_running"] and self.status["days_completed"] > 0:
            elapsed = (
                datetime.now() -
                datetime.fromisoformat(self.status["started_at"])
            ).total_seconds()

            avg_time_per_day = elapsed / self.status["days_completed"]
            remaining_seconds = avg_time_per_day * self.status["days_remaining"]

            eta = datetime.now() + timedelta(seconds=remaining_seconds)
            self.status["estimated_completion"] = eta.isoformat()
            self.status["estimated_time_remaining_minutes"] = round(remaining_seconds / 60, 1)

        return self.status

    async def _backfill_loop(self, start_date: datetime, end_date: datetime):
        """Main backfill loop."""
        current_date = start_date

        while current_date <= end_date and self.is_running:
            try:
                date_str = current_date.strftime("%Y-%m-%d")
                self.status["current_date"] = date_str

                logger.info(f"Processing {date_str}...")

                # Check existing data coverage
                coverage = self.db.check_day_coverage(date_str, sn=self.sn)

                if coverage["has_data"] and coverage["reading_count"] > 1400:
                    # Day is well covered, skip
                    logger.info(f"  {date_str} already has {coverage['reading_count']} readings, skipping")
                    self.status["days_completed"] += 1
                    self.status["days_remaining"] -= 1
                    current_date += timedelta(days=1)
                    continue

                # Fetch day data from API
                result = await self._fetch_and_store_day(current_date)

                self.status["records_inserted"] += result["inserted"]
                self.status["records_skipped"] += result["skipped"]
                self.status["conflicts_found"] += result["conflicts"]

                if result["error"]:
                    self.status["errors"].append({
                        "date": date_str,
                        "error": result["error"]
                    })

                self.status["days_completed"] += 1
                self.status["days_remaining"] -= 1

                logger.info(
                    f"  {date_str}: inserted={result['inserted']}, "
                    f"skipped={result['skipped']}, conflicts={result['conflicts']}"
                )

                # Rate limiting delay
                if self.is_running:
                    await asyncio.sleep(self.delay_between_days)

            except Exception as e:
                logger.error(f"Error processing {current_date}: {e}", exc_info=True)
                self.status["errors"].append({
                    "date": current_date.strftime("%Y-%m-%d"),
                    "error": str(e)
                })

            current_date += timedelta(days=1)

        self.is_running = False
        self.status["is_running"] = False
        logger.info("Backfill completed")

    async def _fetch_and_store_day(self, date: datetime) -> Dict:
        """
        Fetch minute data for a day and store in database.

        Returns:
            Dictionary with counts: inserted, skipped, conflicts, error
        """
        result = {
            "inserted": 0,
            "skipped": 0,
            "conflicts": 0,
            "error": None
        }

        try:
            # Calculate timestamps for the day
            start_of_day = date.replace(hour=0, minute=0, second=0, microsecond=0, tzinfo=timezone.utc)
            end_of_day = date.replace(hour=23, minute=59, second=59, microsecond=999000, tzinfo=timezone.utc)

            start_ts = int(start_of_day.timestamp() * 1000)
            end_ts = int(end_of_day.timestamp() * 1000)

            # Fetch minute chart data
            api_response = await asyncio.to_thread(
                self.hanchu_service.get_power_minute_chart,
                start_ts=start_ts,
                end_ts=end_ts
            )

            # Check if successful
            if not api_response.get("success"):
                result["error"] = f"API returned success=false: {api_response}"
                return result

            # Extract data points
            data = api_response.get("data", {})

            # Check if data is a list or dict
            if isinstance(data, list):
                # Data is an array of readings
                logger.info(f"  Received {len(data)} data points")

                # Process each data point
                for idx, point in enumerate(data):
                    try:
                        if isinstance(point, dict):
                            # Extract timestamp from dataTimeTs field (milliseconds)
                            if 'dataTimeTs' in point:
                                timestamp_ms = point['dataTimeTs']
                                point_time = datetime.fromtimestamp(timestamp_ms / 1000, tz=timezone.utc)
                                timestamp_str = point_time.isoformat()
                            else:
                                # Fallback: generate timestamp based on index (minute of the day)
                                point_time = start_of_day + timedelta(minutes=idx)
                                timestamp_str = point_time.isoformat()

                            row_id = self.db.insert_minute_reading(
                                timestamp=timestamp_str,
                                data=point,
                                sn=self.sn,
                                data_source='backfill'
                            )

                            if row_id:
                                result["inserted"] += 1
                            else:
                                result["skipped"] += 1

                    except Exception as e:
                        logger.error(f"  Error inserting point {idx}: {e}")
                        result["conflicts"] += 1

            elif isinstance(data, dict):
                # Data is a dictionary, log keys to understand structure
                logger.info(f"  Received API response with keys: {list(data.keys())}")

                # Check for common array keys that might contain the data
                for key in ['data', 'points', 'readings', 'values', 'chart']:
                    if key in data and isinstance(data[key], list):
                        logger.info(f"  Found array in '{key}' with {len(data[key])} items")
                        break

                # Mark as processed but note we need to implement parsing
                result["error"] = f"Need to implement parsing for dict structure: {list(data.keys())}"

            else:
                result["error"] = f"Unexpected data type: {type(data)}"

        except Exception as e:
            result["error"] = str(e)
            logger.error(f"Error fetching day data: {e}")

        return result


# Global backfill service instance
_backfill_service: Optional[BackfillService] = None


def get_backfill_service() -> BackfillService:
    """Get or create the global backfill service instance."""
    global _backfill_service
    if _backfill_service is None:
        _backfill_service = BackfillService()
    return _backfill_service
```
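The ETA logic in `get_status` above is plain proportional extrapolation. For example, if 50 completed days took 250 seconds of elapsed time, `avg_time_per_day` is 5 seconds, so 150 remaining days gives `remaining_seconds = 750`, i.e. an `estimated_completion` about 12.5 minutes out. That is also why the calculation is guarded by `days_completed > 0`: there is no average to extrapolate from until at least one day has finished.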
v1/backend/src/service/hanchu_service.py (new file, 266 lines)
@@ -0,0 +1,266 @@
```python
import base64
import json
import os
import requests

from Crypto.Cipher import AES, PKCS1_v1_5
from Crypto.PublicKey import RSA
from Crypto.Util.Padding import pad, unpad
from dotenv import load_dotenv


load_dotenv()


class HanchuESSService:
    # RSA public key from the JavaScript code
    RSA_PUBLIC_KEY = """-----BEGIN PUBLIC KEY-----
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVg7RFDLMGM4O98d1zWKI5RQan
jci3iY4qlpgsH76fUn3GnZtqjbRk37lCQDv6AhgPNXRPpty81+g909/c4yzySKaP
CcDZv7KdCRB1mVxkq+0z4EtKx9EoTXKnFSDBaYi2srdal1tM3gGOsNTDN58CzYPX
nDGPX7+EHS1Mm4aVDQIDAQAB
-----END PUBLIC KEY-----"""

    def __init__(self):
        self.name = "HanchuESS Service"

        # Load config from environment
        self.aes_key = os.environ["HANCHU_AES_KEY"].encode("utf-8")
        self.aes_iv = os.environ["HANCHU_AES_IV"].encode("utf-8")
        self.login_url = os.environ["HANCHU_LOGIN_URL"]
        self.login_type = os.getenv("HANCHU_LOGIN_TYPE", "ACCOUNT")
        self.timeout = int(os.getenv("HANCHU_HTTP_TIMEOUT", "10"))
        self.verify_ssl = os.getenv("HANCHU_VERIFY_SSL", "true").lower() == "true"
        self.hanchu_username = os.getenv("HANCHU_USERNAME", "")
        self.hanchu_password = os.getenv("HANCHU_PASSWORD", "")
        self.base_serial_number = os.getenv("BASE_SERIAL_NUMBER", "")

        # Cache for access token
        self._access_token = None

        # Safety checks
        if len(self.aes_key) not in (16, 24, 32):
            raise ValueError("AES key must be 16, 24, or 32 bytes")
        if len(self.aes_iv) != 16:
            raise ValueError("AES IV must be exactly 16 bytes")

    def encrypt_payload(self, data: dict | str) -> str:
        """
        Encrypt payload using AES-CBC and return base64 string.
        """
        if not isinstance(data, str):
            data = json.dumps(data, separators=(",", ":"))

        cipher = AES.new(self.aes_key, AES.MODE_CBC, self.aes_iv)
        ciphertext = cipher.encrypt(pad(data.encode("utf-8"), AES.block_size))
        return base64.b64encode(ciphertext).decode("utf-8")

    def decrypt_payload(self, encrypted_data: str) -> dict | str:
        """
        Decrypt base64-encoded AES-CBC payload and return the original data.
        """
        ciphertext = base64.b64decode(encrypted_data)
        cipher = AES.new(self.aes_key, AES.MODE_CBC, self.aes_iv)
        decrypted = unpad(cipher.decrypt(ciphertext), AES.block_size)
        decrypted_str = decrypted.decode("utf-8")

        # Try to parse as JSON, return string if it fails
        try:
            return json.loads(decrypted_str)
        except json.JSONDecodeError:
            return decrypted_str

    def encrypt_password_rsa(self, password: str) -> str:
        """
        Encrypt password using RSA public key (matches JavaScript GO function).
        Returns base64-encoded encrypted password.
        """
        public_key = RSA.import_key(self.RSA_PUBLIC_KEY)
        cipher = PKCS1_v1_5.new(public_key)
        encrypted = cipher.encrypt(password.encode('utf-8'))
        return base64.b64encode(encrypted).decode('utf-8')

    def get_access_token(self) -> str:
        """
        Authenticate with Hanchu ESS and return access token.
        Uses double encryption: RSA for password, then AES for entire payload.
        Caches the token to avoid unnecessary logins.
        """
        # Return cached token if available
        if self._access_token:
            return self._access_token

        # Step 1: RSA encrypt the password
        encrypted_password = self.encrypt_password_rsa(self.hanchu_password)

        # Step 2: Build payload with encrypted password
        payload = {
            "account": self.hanchu_username,
            "pwd": encrypted_password,
            "loginType": self.login_type,
        }

        # Step 3: AES encrypt the entire payload
        encrypted_payload = self.encrypt_payload(payload)

        # Step 4: Send to API with correct headers
        headers = {
            "Content-Type": "text/plain",
            "Accept": "application/json, text/plain, */*",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
            "appplat": "iess",
            "locale": "en",
            "timezone": "Africa/Accra",
            "timeselected": "GMT",
            "version": "1.0",
            "crypto-version": "1.0.0",
            "Origin": "https://iess3.hanchuess.com",
            "Referer": "https://iess3.hanchuess.com/login",
        }

        response = requests.post(
            self.login_url,
            data=encrypted_payload,
            headers=headers,
            timeout=self.timeout,
            verify=self.verify_ssl,
        )

        response.raise_for_status()
        result = response.json()

        try:
            # The token is directly in the 'data' field as a JWT string
            if result.get("success") and result.get("data"):
                self._access_token = result["data"]
                return self._access_token
            else:
                raise RuntimeError(
                    f"Hanchu login failed: {json.dumps(result, ensure_ascii=False)}"
                )
        except (KeyError, TypeError):
            raise RuntimeError(
                f"Hanchu login failed: {json.dumps(result, ensure_ascii=False)}"
            )

    def get_power_chart(self, access_token: str = None) -> dict:
        """
        Get 65-second power chart data from HanchuESS.

        Args:
            access_token: Optional JWT token from login. If not provided, will get one automatically.

        Returns:
            Power chart data from the API
        """
        # Get access token if not provided
        if not access_token:
            # Check if we have a cached token first
            if self._access_token:
                access_token = self._access_token
            else:
                access_token = self.get_access_token()

        # Build payload with serial number
        payload = {"sn": self.base_serial_number}

        # AES encrypt the payload
        encrypted_payload = self.encrypt_payload(payload)

        # Send to API with access token
        url = "https://iess3.hanchuess.com/gateway/platform/pcs/powerChart"
        headers = {
            "Content-Type": "text/plain",
            "Accept": "application/json, text/plain, */*",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
            "appplat": "iess",
            "locale": "en",
            "timezone": "Africa/Accra",
            "timeselected": "GMT",
            "version": "1.0",
            "crypto-version": "1.0.0",
            "access-token": access_token,
            "Origin": "https://iess3.hanchuess.com",
            "Referer": "https://iess3.hanchuess.com/",
        }

        response = requests.post(
            url,
            data=encrypted_payload,
            headers=headers,
            timeout=self.timeout,
            verify=self.verify_ssl,
        )

        response.raise_for_status()
        return response.json()

    def get_power_minute_chart(self, access_token: str = None, start_ts: int = None, end_ts: int = None) -> dict:
        """
        Get minute-by-minute power chart data from HanchuESS.

        Args:
            access_token: Optional JWT token from login. If not provided, will get one automatically.
            start_ts: Start timestamp in milliseconds. If not provided, defaults to 30 minutes ago.
            end_ts: End timestamp in milliseconds. If not provided, defaults to the current time.

        Returns:
            Power minute chart data from the API
        """
        # Get access token if not provided
        if not access_token:
            # Check if we have a cached token first
            if self._access_token:
                access_token = self._access_token
            else:
                access_token = self.get_access_token()

        # Set default timestamps if not provided (last 30 minutes)
        if not start_ts or not end_ts:
            from datetime import datetime, timezone, timedelta
            now = datetime.now(timezone.utc)
            # 30 minutes ago
            start_time = now - timedelta(minutes=30)
            start_ts = int(start_time.timestamp() * 1000)
            # Current time
            end_ts = int(now.timestamp() * 1000)

        # Build payload
        payload = {
            "sn": self.base_serial_number,
            "devType": "2",
            "maxCount": 1440,  # 24 hours * 60 minutes
            "dataTimeTsEnd": end_ts,
            "dataTimeTsStart": start_ts,
            "masterSum": True
        }

        # AES encrypt the payload
        encrypted_payload = self.encrypt_payload(payload)

        # Send to API with access token
        url = "https://iess3.hanchuess.com/gateway/platform/pcs/powerMinuteChart"
        headers = {
            "Content-Type": "text/plain",
            "Accept": "application/json, text/plain, */*",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
            "appplat": "iess",
            "locale": "en",
            "timezone": "Africa/Accra",
            "timeselected": "GMT",
            "version": "1.0",
            "crypto-version": "1.0.0",
            "access-token": access_token,
            "Origin": "https://iess3.hanchuess.com",
            "Referer": "https://iess3.hanchuess.com/",
        }

        response = requests.post(
            url,
            data=encrypted_payload,
            headers=headers,
            timeout=self.timeout,
            verify=self.verify_ssl,
        )

        response.raise_for_status()
        return response.json()
```
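A self-contained sketch of the AES-CBC round trip that `encrypt_payload` and `decrypt_payload` perform (the key and IV here are throwaway placeholders, not real credentials):

```python
import base64
import json

from Crypto.Cipher import AES
from Crypto.Util.Padding import pad, unpad

key = b"0123456789abcdef"  # 16 bytes, placeholder only
iv = b"fedcba9876543210"   # 16 bytes, placeholder only

payload = json.dumps({"sn": "EXAMPLE"}, separators=(",", ":")).encode("utf-8")

# Encrypt: pad to the 16-byte block size, then base64-encode the ciphertext.
ct = AES.new(key, AES.MODE_CBC, iv).encrypt(pad(payload, AES.block_size))
wire = base64.b64encode(ct).decode("utf-8")

# Decrypt: reverse the steps. A fresh cipher object is needed per direction,
# since CBC cipher objects are stateful.
pt = unpad(AES.new(key, AES.MODE_CBC, iv).decrypt(base64.b64decode(wire)), AES.block_size)
assert json.loads(pt) == {"sn": "EXAMPLE"}
```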
v1/backend/src/service/monitoring_service.py (new file, 119 lines)
@@ -0,0 +1,119 @@
```python
"""
Monitoring service for continuous power data collection.
Polls the HanchuESS API every 65 seconds and stores data in SQLite.
"""
import asyncio
import logging
from typing import Optional
from datetime import datetime

from service.hanchu_service import HanchuESSService
from database.db import PowerReadingsDB


# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class MonitoringService:
    """Background service for monitoring power readings."""

    def __init__(self):
        """Initialize monitoring service."""
        self.hanchu_service = HanchuESSService()
        self.db = PowerReadingsDB()
        self.is_running = False
        self.task: Optional[asyncio.Task] = None
        self.poll_interval = 65  # seconds

    async def start(self):
        """Start the monitoring service."""
        if self.is_running:
            logger.warning("Monitoring service is already running")
            return False

        self.is_running = True
        self.task = asyncio.create_task(self._monitoring_loop())
        logger.info("Monitoring service started")
        return True

    async def stop(self):
        """Stop the monitoring service."""
        if not self.is_running:
            logger.warning("Monitoring service is not running")
            return False

        self.is_running = False
        if self.task:
            self.task.cancel()
            try:
                await self.task
            except asyncio.CancelledError:
                pass

        logger.info("Monitoring service stopped")
        return True

    def get_status(self) -> dict:
        """Get current monitoring status."""
        return {
            "is_running": self.is_running,
            "poll_interval": self.poll_interval,
            "db_path": self.db.db_path,
            "db_stats": self.db.get_stats()
        }

    async def _monitoring_loop(self):
        """Main monitoring loop that runs every 65 seconds."""
        logger.info(f"Monitoring loop started (polling every {self.poll_interval}s)")

        while self.is_running:
            try:
                # Fetch power data from API
                logger.info("Fetching power chart data...")
                power_data = await asyncio.to_thread(
                    self.hanchu_service.get_power_chart
                )

                # Check if API call was successful
                if power_data.get('success'):
                    # Insert into database with 'realtime' source
                    reading_id = self.db.insert_reading(power_data, data_source='realtime')
                    logger.info(
                        f"Stored reading ID {reading_id} at {datetime.now().isoformat()}"
                    )

                    # Log some key metrics
                    data = power_data.get('data', {})
                    logger.info(
                        f"  PV: {data.get('pvTtPwr')}W | "
                        f"Battery: {data.get('batP')}W ({data.get('batSoc')}%) | "
                        f"Load: {data.get('loadPwr')}W"
                    )
                else:
                    logger.error(f"API call failed: {power_data}")

            except Exception as e:
                logger.error(f"Error in monitoring loop: {e}", exc_info=True)

            # Wait for next poll interval
            try:
                await asyncio.sleep(self.poll_interval)
            except asyncio.CancelledError:
                logger.info("Monitoring loop cancelled")
                break

        logger.info("Monitoring loop ended")


# Global monitoring service instance
_monitoring_service: Optional[MonitoringService] = None


def get_monitoring_service() -> MonitoringService:
    """Get or create the global monitoring service instance."""
    global _monitoring_service
    if _monitoring_service is None:
        _monitoring_service = MonitoringService()
    return _monitoring_service
```
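The loop above is the standard cancel-safe asyncio polling pattern: do the work, then `sleep`, and treat `CancelledError` as the shutdown signal. A stripped-down, runnable sketch of just that pattern (a 1-second interval stands in for the real 65 seconds):

```python
import asyncio

async def poll(interval: float):
    while True:
        print("tick")  # stand-in for the fetch-and-store step
        try:
            await asyncio.sleep(interval)
        except asyncio.CancelledError:
            break  # task.cancel() lands here, so shutdown is clean

async def main():
    task = asyncio.create_task(poll(1.0))
    await asyncio.sleep(3.5)  # let a few polls run
    task.cancel()
    await asyncio.gather(task, return_exceptions=True)

asyncio.run(main())
```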
v1/docker-compose.yml (new file, 25 lines)
@@ -0,0 +1,25 @@
```yaml
services:
  hanchuess-solar-backend:
    build:
      context: backend
      dockerfile: docker/Dockerfile
    container_name: hanchuess-solar-backend
    ports:
      - "8050:8050"
    volumes:
      - sqlite_data:/data
    env_file:
      - .env
    environment:
      - HANCHU_AES_KEY=${HANCHU_AES_KEY}
      - HANCHU_AES_IV=${HANCHU_AES_IV}
      - HANCHU_LOGIN_URL=${HANCHU_LOGIN_URL}
      - HANCHU_LOGIN_TYPE=${HANCHU_LOGIN_TYPE:-ACCOUNT}
      - HANCHU_HTTP_TIMEOUT=${HANCHU_HTTP_TIMEOUT:-10}
      - HANCHU_VERIFY_SSL=${HANCHU_VERIFY_SSL:-true}
      - HANCHU_USERNAME=${HANCHU_USERNAME:-}
      - HANCHU_PASSWORD=${HANCHU_PASSWORD:-}
      - SQLITE_DB_PATH=${SQLITE_DB_PATH:-}

volumes:
  sqlite_data:
```
v2/.env.example (new file, 24 lines)
@@ -0,0 +1,24 @@
```env
# HanchuESS API Configuration
# Copy this file to .env and fill in your actual values

# Required: AES encryption key (must be 16, 24, or 32 bytes)
HANCHU_AES_KEY=your_aes_key_here

# Required: AES initialization vector (must be 16 bytes)
HANCHU_AES_IV=your_aes_iv_here

# Required: Login URL for the HanchuESS API
HANCHU_LOGIN_URL=https://api.example.com/login

# Optional: Login type (default: ACCOUNT)
HANCHU_LOGIN_TYPE=ACCOUNT

# Optional: HTTP timeout in seconds (default: 10)
HANCHU_HTTP_TIMEOUT=10

# Optional: Verify SSL certificates (default: true, set to false for self-signed certs)
HANCHU_VERIFY_SSL=true

# Optional: Username and password
HANCHU_USERNAME=
HANCHU_PASSWORD=
```
v2/.gitignore (new file, vendored, 176 lines)
@@ -0,0 +1,176 @@
```gitignore
# ---> Python
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
#uv.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/

# Ruff stuff:
.ruff_cache/

# PyPI configuration file
.pypirc
```
12
v2/backend/docker/Dockerfile
Normal file
12
v2/backend/docker/Dockerfile
Normal file
@@ -0,0 +1,12 @@
FROM python:3.13.7-slim-bookworm

WORKDIR /app

# Copy requirements and install Python dependencies
COPY ./requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

# Copy the app code (paths are relative to the build context, i.e. backend/)
COPY ./src ./src

CMD ["python", "src/main.py"]
8
v2/backend/requirements.txt
Normal file
8
v2/backend/requirements.txt
Normal file
@@ -0,0 +1,8 @@
fastapi
uvicorn[standard]
pycryptodome>=3.20.0
python-dotenv>=1.0.1
requests>=2.31.0
bcrypt>=4.0.1
python-jose[cryptography]>=3.3.0
cryptography>=41.0.0
143
v2/backend/src/auth.py
Normal file
143
v2/backend/src/auth.py
Normal file
@@ -0,0 +1,143 @@
"""
Authentication utilities for user management and JWT tokens.
"""
import os
import bcrypt
from datetime import datetime, timedelta, timezone
from typing import Optional

from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials
from jose import JWTError, jwt
from dotenv import load_dotenv


load_dotenv()

# JWT settings
SECRET_KEY = os.getenv("JWT_SECRET_KEY", "your-secret-key-change-this-in-production")
ALGORITHM = "HS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 15  # Short-lived access tokens
REFRESH_TOKEN_EXPIRE_DAYS = 7  # Long-lived refresh tokens

# Security scheme
security = HTTPBearer()


def hash_password(password: str) -> str:
    """Hash a password using bcrypt."""
    # Bcrypt has a 72-byte limit; truncate the UTF-8 bytes (not characters)
    password_bytes = password.encode('utf-8')
    if len(password_bytes) > 72:
        password_bytes = password_bytes[:72]

    salt = bcrypt.gensalt()
    hashed = bcrypt.hashpw(password_bytes, salt)

    return hashed.decode('utf-8')


def verify_password(plain_password: str, hashed_password: str) -> bool:
    """Verify a password against a hash."""
    # Bcrypt has a 72-byte limit; truncate bytes the same way as hash_password
    password_bytes = plain_password.encode('utf-8')
    if len(password_bytes) > 72:
        password_bytes = password_bytes[:72]

    hashed_bytes = hashed_password.encode('utf-8')

    return bcrypt.checkpw(password_bytes, hashed_bytes)


def create_access_token(data: dict, expires_delta: Optional[timedelta] = None) -> str:
    """
    Create a JWT access token.

    Args:
        data: Dictionary to encode in the token
        expires_delta: Optional expiration time delta

    Returns:
        Encoded JWT token string
    """
    to_encode = data.copy()

    if expires_delta:
        expire = datetime.now(timezone.utc) + expires_delta
    else:
        expire = datetime.now(timezone.utc) + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)

    to_encode.update({"exp": expire, "type": "access"})
    encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)

    return encoded_jwt


def create_refresh_token(data: dict) -> str:
    """
    Create a JWT refresh token.

    Args:
        data: Dictionary to encode in the token

    Returns:
        Encoded JWT refresh token string
    """
    to_encode = data.copy()
    expire = datetime.now(timezone.utc) + timedelta(days=REFRESH_TOKEN_EXPIRE_DAYS)

    to_encode.update({"exp": expire, "type": "refresh"})
    encoded_jwt = jwt.encode(to_encode, SECRET_KEY, algorithm=ALGORITHM)

    return encoded_jwt


def decode_access_token(token: str) -> dict:
    """
    Decode and verify a JWT access token.

    Args:
        token: JWT token string

    Returns:
        Decoded token payload

    Raises:
        HTTPException: If token is invalid or expired
    """
    try:
        payload = jwt.decode(token, SECRET_KEY, algorithms=[ALGORITHM])
        return payload
    except JWTError:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Could not validate credentials",
            headers={"WWW-Authenticate": "Bearer"},
        )


def get_current_user(credentials: HTTPAuthorizationCredentials = Depends(security)) -> dict:
    """
    Dependency to get current authenticated user from JWT token.

    Args:
        credentials: HTTP Bearer credentials from request

    Returns:
        User data from token

    Raises:
        HTTPException: If authentication fails
    """
    token = credentials.credentials
    payload = decode_access_token(token)

    username = payload.get("sub")
    if username is None:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Could not validate credentials",
            headers={"WWW-Authenticate": "Bearer"},
        )

    return {"username": username}
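A minimal round trip through these helpers, assuming it is run from `backend/src` with `JWT_SECRET_KEY` set (or the development fallback accepted):

from auth import hash_password, verify_password, create_access_token, decode_access_token

hashed = hash_password("s3cret")
assert verify_password("s3cret", hashed)  # bcrypt verifies against the salted hash

token = create_access_token(data={"sub": "alice"})
payload = decode_access_token(token)
assert payload["sub"] == "alice" and payload["type"] == "access"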
722
v2/backend/src/database/db.py
Normal file
722
v2/backend/src/database/db.py
Normal file
@@ -0,0 +1,722 @@
"""
Database module for storing HanchuESS station data.
Uses SQLite with a relational structure for station info and related devices.
"""
import os
import sqlite3
import json
from datetime import datetime
from typing import Dict, List, Optional
from contextlib import contextmanager


class UserDB:
    """Database manager for user authentication."""

    def __init__(self, db_path: str = None):
        """
        Initialize database connection.

        Args:
            db_path: Path to SQLite database file. If None, reads from SQLITE_DB_PATH env var.
        """
        if db_path is None:
            # `or` also covers the case where SQLITE_DB_PATH is set but empty
            db_path = os.getenv("SQLITE_DB_PATH") or "./hanchuess_solar.db"

        self.db_path = db_path
        self._init_database()

    @contextmanager
    def _get_connection(self):
        """Context manager for database connections."""
        conn = sqlite3.connect(self.db_path, check_same_thread=False)
        conn.row_factory = sqlite3.Row
        # SQLite leaves foreign keys off by default; enable them so ON DELETE CASCADE fires
        conn.execute("PRAGMA foreign_keys = ON")
        try:
            yield conn
            conn.commit()
        except Exception as e:
            conn.rollback()
            raise e
        finally:
            conn.close()

    def _init_database(self):
        """Create users table if it doesn't exist."""
        with self._get_connection() as conn:
            cursor = conn.cursor()

            cursor.execute("""
                CREATE TABLE IF NOT EXISTS users (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    username TEXT UNIQUE NOT NULL,
                    hashed_password TEXT NOT NULL,
                    hanchu_username TEXT NOT NULL,
                    hanchu_password TEXT NOT NULL,
                    created_at DATETIME NOT NULL,
                    updated_at DATETIME NOT NULL
                )
            """)

            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_username
                ON users(username)
            """)

            # Refresh tokens table
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS refresh_tokens (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    token TEXT UNIQUE NOT NULL,
                    username TEXT NOT NULL,
                    created_at DATETIME NOT NULL,
                    expires_at DATETIME NOT NULL,
                    revoked BOOLEAN DEFAULT 0,

                    FOREIGN KEY (username) REFERENCES users(username) ON DELETE CASCADE
                )
            """)

            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_refresh_token
                ON refresh_tokens(token)
            """)

            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_refresh_username
                ON refresh_tokens(username)
            """)

            conn.commit()

    def store_refresh_token(self, token: str, username: str, expires_at: str) -> int:
        """
        Store a refresh token.

        Args:
            token: Refresh token string
            username: Username the token belongs to
            expires_at: ISO format expiration datetime

        Returns:
            Token ID
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()
            now = datetime.now().isoformat()

            cursor.execute("""
                INSERT INTO refresh_tokens (token, username, created_at, expires_at)
                VALUES (?, ?, ?, ?)
            """, (token, username, now, expires_at))

            return cursor.lastrowid

    def get_refresh_token(self, token: str) -> Optional[Dict]:
        """
        Get refresh token details.

        Args:
            token: Refresh token string

        Returns:
            Token dictionary or None if not found
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("""
                SELECT * FROM refresh_tokens
                WHERE token = ? AND revoked = 0
            """, (token,))
            row = cursor.fetchone()

            if row:
                return dict(row)
            return None

    def revoke_refresh_token(self, token: str) -> bool:
        """
        Revoke a refresh token.

        Args:
            token: Refresh token string

        Returns:
            True if revoked, False if token not found
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("""
                UPDATE refresh_tokens
                SET revoked = 1
                WHERE token = ?
            """, (token,))

            return cursor.rowcount > 0

    def revoke_all_user_tokens(self, username: str) -> int:
        """
        Revoke all refresh tokens for a user.

        Args:
            username: Username

        Returns:
            Number of tokens revoked
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("""
                UPDATE refresh_tokens
                SET revoked = 1
                WHERE username = ? AND revoked = 0
            """, (username,))

            return cursor.rowcount

    def cleanup_expired_tokens(self) -> int:
        """
        Delete expired refresh tokens.

        Returns:
            Number of tokens deleted
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()
            now = datetime.now().isoformat()

            cursor.execute("""
                DELETE FROM refresh_tokens
                WHERE expires_at < ?
            """, (now,))

            return cursor.rowcount

    def create_user(self, username: str, hashed_password: str,
                    hanchu_username: str, hanchu_password: str) -> int:
        """
        Create a new user.

        Args:
            username: User's login username
            hashed_password: Bcrypt hashed password
            hanchu_username: HanchuESS API username
            hanchu_password: HanchuESS API password

        Returns:
            User ID

        Raises:
            sqlite3.IntegrityError: If username already exists
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()
            now = datetime.now().isoformat()

            cursor.execute("""
                INSERT INTO users (username, hashed_password, hanchu_username,
                                   hanchu_password, created_at, updated_at)
                VALUES (?, ?, ?, ?, ?, ?)
            """, (username, hashed_password, hanchu_username, hanchu_password, now, now))

            return cursor.lastrowid

    def get_user_by_username(self, username: str) -> Optional[Dict]:
        """
        Get user by username.

        Args:
            username: Username to look up

        Returns:
            User dictionary or None if not found
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT * FROM users WHERE username = ?", (username,))
            row = cursor.fetchone()

            if row:
                return dict(row)
            return None

    def update_hanchu_credentials(self, username: str, hanchu_username: str,
                                  hanchu_password: str) -> bool:
        """
        Update HanchuESS credentials for a user.

        Args:
            username: User's login username
            hanchu_username: New HanchuESS username
            hanchu_password: New HanchuESS password

        Returns:
            True if updated, False if user not found
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()
            now = datetime.now().isoformat()

            cursor.execute("""
                UPDATE users
                SET hanchu_username = ?, hanchu_password = ?, updated_at = ?
                WHERE username = ?
            """, (hanchu_username, hanchu_password, now, username))

            return cursor.rowcount > 0


class StationDB:
    """Database manager for HanchuESS station data."""

    def __init__(self, db_path: str = None):
        """
        Initialize database connection.

        Args:
            db_path: Path to SQLite database file. If None, reads from SQLITE_DB_PATH env var.
        """
        if db_path is None:
            # `or` also covers the case where SQLITE_DB_PATH is set but empty
            db_path = os.getenv("SQLITE_DB_PATH") or "./hanchuess_solar.db"

        self.db_path = db_path
        self._init_database()

    @contextmanager
    def _get_connection(self):
        """Context manager for database connections."""
        conn = sqlite3.connect(self.db_path, check_same_thread=False)
        conn.row_factory = sqlite3.Row
        # SQLite leaves foreign keys off by default; enable them so ON DELETE CASCADE fires
        conn.execute("PRAGMA foreign_keys = ON")
        try:
            yield conn
            conn.commit()
        except Exception as e:
            conn.rollback()
            raise e
        finally:
            conn.close()

    def _init_database(self):
        """Create tables if they don't exist."""
        with self._get_connection() as conn:
            cursor = conn.cursor()

            # Check if station_data table exists and if it has added_by_username column
            cursor.execute("""
                SELECT name FROM sqlite_master
                WHERE type='table' AND name='station_data'
            """)
            table_exists = cursor.fetchone() is not None

            if table_exists:
                # Check if added_by_username column exists
                cursor.execute("PRAGMA table_info(station_data)")
                columns = [row[1] for row in cursor.fetchall()]

                if 'added_by_username' not in columns:
                    # Add the new column with a default value for existing rows
                    cursor.execute("""
                        ALTER TABLE station_data
                        ADD COLUMN added_by_username TEXT NOT NULL DEFAULT 'unknown'
                    """)

            # Main station data table
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS station_data (
                    -- Primary identification
                    station_id TEXT PRIMARY KEY,
                    station_name TEXT,
                    station_user_name TEXT,

                    -- User who added this station
                    added_by_username TEXT NOT NULL,

                    -- Owner information
                    owner_id TEXT,
                    owner_name TEXT,

                    -- Location
                    belonging_country_code TEXT,
                    postal_code TEXT,
                    coordinates TEXT, -- Stored as "longitude,latitude"

                    -- Power specifications
                    pv_power REAL, -- Total PV power
                    rated_power REAL, -- Rated power
                    capacity_pv1 REAL, -- PV capacity

                    -- Pricing information
                    price_type TEXT,
                    buy_product_code TEXT,
                    sell_product_code TEXT,
                    price_platform_code TEXT,
                    price_product_code TEXT,

                    -- Arrays stored as JSON
                    buy_prices TEXT, -- JSON array
                    sell_prices TEXT, -- JSON array

                    -- Metadata
                    created_at DATETIME NOT NULL,
                    updated_at DATETIME NOT NULL
                )
            """)

            # BMS (Battery Management System) table
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS station_bms (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    station_id TEXT NOT NULL,

                    -- BMS specific fields
                    adr TEXT, -- Address
                    search_type TEXT,
                    dtu_software_ver TEXT,
                    dev_id TEXT,

                    -- Store full JSON for all other fields
                    raw_data TEXT,

                    created_at DATETIME NOT NULL,

                    FOREIGN KEY (station_id) REFERENCES station_data(station_id) ON DELETE CASCADE
                )
            """)

            # PCS (Power Conversion System) table
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS station_pcs (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    station_id TEXT NOT NULL,

                    -- PCS specific fields
                    adr TEXT, -- Address
                    parallel TEXT,
                    create_timestamp BIGINT,
                    update_timestamp BIGINT,

                    -- Store full JSON for all other fields
                    raw_data TEXT,

                    created_at DATETIME NOT NULL,

                    FOREIGN KEY (station_id) REFERENCES station_data(station_id) ON DELETE CASCADE
                )
            """)

            # MT (Meter) table
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS station_mt (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    station_id TEXT NOT NULL,

                    -- Store full JSON
                    raw_data TEXT,

                    created_at DATETIME NOT NULL,

                    FOREIGN KEY (station_id) REFERENCES station_data(station_id) ON DELETE CASCADE
                )
            """)

            # EV Charger table
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS station_ev_charge (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    station_id TEXT NOT NULL,

                    -- Store full JSON
                    raw_data TEXT,

                    created_at DATETIME NOT NULL,

                    FOREIGN KEY (station_id) REFERENCES station_data(station_id) ON DELETE CASCADE
                )
            """)

            # Heat system table
            cursor.execute("""
                CREATE TABLE IF NOT EXISTS station_heat (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    station_id TEXT NOT NULL,

                    -- Store full JSON
                    raw_data TEXT,

                    created_at DATETIME NOT NULL,

                    FOREIGN KEY (station_id) REFERENCES station_data(station_id) ON DELETE CASCADE
                )
            """)

            # Create indexes
            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_station_id
                ON station_data(station_id)
            """)

            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_station_added_by
                ON station_data(added_by_username)
            """)

            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_bms_station_id
                ON station_bms(station_id)
            """)

            cursor.execute("""
                CREATE INDEX IF NOT EXISTS idx_pcs_station_id
                ON station_pcs(station_id)
            """)

            conn.commit()

    def upsert_station_data(self, data: Dict, username: str) -> str:
        """
        Insert or update station data along with all related devices.

        Args:
            data: Dictionary containing the station API response
            username: Username of the user adding/updating the station

        Returns:
            The station_id that was inserted/updated
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()

            station_id = data.get('stationId')
            if not station_id:
                raise ValueError("stationId is required")

            now = datetime.now().isoformat()

            # Check if station exists
            cursor.execute("SELECT station_id, added_by_username FROM station_data WHERE station_id = ?", (station_id,))
            existing = cursor.fetchone()

            if existing:
                # Update existing station (keep original added_by_username)
                cursor.execute("""
                    UPDATE station_data SET
                        station_name = ?,
                        station_user_name = ?,
                        owner_id = ?,
                        owner_name = ?,
                        belonging_country_code = ?,
                        postal_code = ?,
                        coordinates = ?,
                        pv_power = ?,
                        rated_power = ?,
                        capacity_pv1 = ?,
                        price_type = ?,
                        buy_product_code = ?,
                        sell_product_code = ?,
                        price_platform_code = ?,
                        price_product_code = ?,
                        buy_prices = ?,
                        sell_prices = ?,
                        updated_at = ?
                    WHERE station_id = ?
                """, (
                    data.get('stationName'),
                    data.get('stationUserName'),
                    data.get('ownerId'),
                    data.get('ownerName'),
                    data.get('belongingCountryCode'),
                    data.get('postalCode'),
                    data.get('coordinates'),
                    self._to_float(data.get('pvPower')),
                    self._to_float(data.get('ratedPower')),
                    self._to_float(data.get('capacityPv1')),
                    data.get('priceType'),
                    data.get('buyProductCode'),
                    data.get('sellProductCode'),
                    data.get('pricePlatformCode'),
                    data.get('priceProductCode'),
                    json.dumps(data.get('buyPrices', [])),
                    json.dumps(data.get('sellPrices', [])),
                    now,
                    station_id
                ))

                # Delete existing related records (we'll re-insert)
                cursor.execute("DELETE FROM station_bms WHERE station_id = ?", (station_id,))
                cursor.execute("DELETE FROM station_pcs WHERE station_id = ?", (station_id,))
                cursor.execute("DELETE FROM station_mt WHERE station_id = ?", (station_id,))
                cursor.execute("DELETE FROM station_ev_charge WHERE station_id = ?", (station_id,))
                cursor.execute("DELETE FROM station_heat WHERE station_id = ?", (station_id,))

            else:
                # Insert new station
                cursor.execute("""
                    INSERT INTO station_data (
                        station_id, added_by_username, station_name, station_user_name,
                        owner_id, owner_name,
                        belonging_country_code, postal_code, coordinates,
                        pv_power, rated_power, capacity_pv1,
                        price_type, buy_product_code, sell_product_code,
                        price_platform_code, price_product_code,
                        buy_prices, sell_prices,
                        created_at, updated_at
                    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                """, (
                    station_id,
                    username,
                    data.get('stationName'),
                    data.get('stationUserName'),
                    data.get('ownerId'),
                    data.get('ownerName'),
                    data.get('belongingCountryCode'),
                    data.get('postalCode'),
                    data.get('coordinates'),
                    self._to_float(data.get('pvPower')),
                    self._to_float(data.get('ratedPower')),
                    self._to_float(data.get('capacityPv1')),
                    data.get('priceType'),
                    data.get('buyProductCode'),
                    data.get('sellProductCode'),
                    data.get('pricePlatformCode'),
                    data.get('priceProductCode'),
                    json.dumps(data.get('buyPrices', [])),
                    json.dumps(data.get('sellPrices', [])),
                    now,
                    now
                ))

            # Insert BMS devices
            for bms in data.get('bmsList', []):
                cursor.execute("""
                    INSERT INTO station_bms (
                        station_id, adr, search_type, dtu_software_ver, dev_id,
                        raw_data, created_at
                    ) VALUES (?, ?, ?, ?, ?, ?, ?)
                """, (
                    station_id,
                    bms.get('adr'),
                    bms.get('searchType'),
                    bms.get('dtuSoftwareVer'),
                    bms.get('devId'),
                    json.dumps(bms),
                    now
                ))

            # Insert PCS devices
            for pcs in data.get('pcsList', []):
                cursor.execute("""
                    INSERT INTO station_pcs (
                        station_id, adr, parallel, create_timestamp, update_timestamp,
                        raw_data, created_at
                    ) VALUES (?, ?, ?, ?, ?, ?, ?)
                """, (
                    station_id,
                    pcs.get('adr'),
                    pcs.get('parallel'),
                    pcs.get('createTimestamp'),
                    pcs.get('updateTimestamp'),
                    json.dumps(pcs),
                    now
                ))

            # Insert MT devices
            for mt in data.get('mtList', []):
                cursor.execute("""
                    INSERT INTO station_mt (station_id, raw_data, created_at)
                    VALUES (?, ?, ?)
                """, (station_id, json.dumps(mt), now))

            # Insert EV chargers
            for ev in data.get('evChargeList', []):
                cursor.execute("""
                    INSERT INTO station_ev_charge (station_id, raw_data, created_at)
                    VALUES (?, ?, ?)
                """, (station_id, json.dumps(ev), now))

            # Insert heat systems
            for heat in data.get('heatList', []):
                cursor.execute("""
                    INSERT INTO station_heat (station_id, raw_data, created_at)
                    VALUES (?, ?, ?)
                """, (station_id, json.dumps(heat), now))

            return station_id

    def get_station_data(self, station_id: str, username: str) -> Optional[Dict]:
        """
        Get station data with all related devices for a specific user.

        Args:
            station_id: Station identifier
            username: Username to verify ownership

        Returns:
            Dictionary with station data and related devices, or None if not found or not owned by user
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()

            # Get main station data, verifying ownership
            cursor.execute(
                "SELECT * FROM station_data WHERE station_id = ? AND added_by_username = ?",
                (station_id, username)
            )
            station = cursor.fetchone()

            if not station:
                return None

            result = dict(station)

            # Parse JSON fields (`or '[]'` guards against NULL values in legacy rows)
            result['buyPrices'] = json.loads(result.pop('buy_prices') or '[]')
            result['sellPrices'] = json.loads(result.pop('sell_prices') or '[]')

            # Get BMS devices
            cursor.execute("SELECT * FROM station_bms WHERE station_id = ?", (station_id,))
            result['bmsList'] = [dict(row) for row in cursor.fetchall()]

            # Get PCS devices
            cursor.execute("SELECT * FROM station_pcs WHERE station_id = ?", (station_id,))
            result['pcsList'] = [dict(row) for row in cursor.fetchall()]

            # Get MT devices
            cursor.execute("SELECT * FROM station_mt WHERE station_id = ?", (station_id,))
            result['mtList'] = [dict(row) for row in cursor.fetchall()]

            # Get EV chargers
            cursor.execute("SELECT * FROM station_ev_charge WHERE station_id = ?", (station_id,))
            result['evChargeList'] = [dict(row) for row in cursor.fetchall()]

            # Get heat systems
            cursor.execute("SELECT * FROM station_heat WHERE station_id = ?", (station_id,))
            result['heatList'] = [dict(row) for row in cursor.fetchall()]

            return result

    def list_stations(self, username: str) -> List[Dict]:
        """
        Get list of stations for a specific user.

        Args:
            username: Username to filter stations by

        Returns:
            List of station data dictionaries owned by the user
        """
        with self._get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(
                "SELECT * FROM station_data WHERE added_by_username = ? ORDER BY station_name",
                (username,)
            )
            return [dict(row) for row in cursor.fetchall()]

    @staticmethod
    def _to_float(value) -> Optional[float]:
        """Convert string or numeric value to float, handling None."""
        if value is None or value == "":
            return None
        try:
            return float(value)
        except (ValueError, TypeError):
            return None
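To see the upsert/read cycle end to end, here is a sketch using a hypothetical, minimal stationInfo-shaped payload (the field names follow the API response keys the code reads; the values are made up):

from database.db import StationDB

db = StationDB(db_path="/tmp/demo_station.db")

sample = {
    "stationId": "ST-001",            # required; upsert raises ValueError without it
    "stationName": "Demo Station",
    "pvPower": "5.2",                 # strings are coerced via _to_float
    "bmsList": [{"adr": "1", "devId": "BMS-1"}],
}

db.upsert_station_data(sample, username="alice")
station = db.get_station_data("ST-001", username="alice")
assert station["pv_power"] == 5.2
assert len(station["bmsList"]) == 1
assert db.get_station_data("ST-001", username="bob") is None  # ownership is enforced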
47
v2/backend/src/encryption.py
Normal file
47
v2/backend/src/encryption.py
Normal file
@@ -0,0 +1,47 @@
"""
Encryption utilities for sensitive data storage.
"""
import os
from cryptography.fernet import Fernet
from dotenv import load_dotenv

load_dotenv()

# Get or generate encryption key
ENCRYPTION_KEY = os.getenv("ENCRYPTION_KEY")
if not ENCRYPTION_KEY:
    # Generate a key if not provided (for development)
    # In production, this should be set in .env
    ENCRYPTION_KEY = Fernet.generate_key().decode()
    print(f"WARNING: No ENCRYPTION_KEY in .env. Generated: {ENCRYPTION_KEY}")
    print("Add this to your .env file!")

_fernet = Fernet(ENCRYPTION_KEY.encode() if isinstance(ENCRYPTION_KEY, str) else ENCRYPTION_KEY)


def encrypt_string(plain_text: str) -> str:
    """
    Encrypt a string using Fernet symmetric encryption.

    Args:
        plain_text: String to encrypt

    Returns:
        Base64-encoded encrypted string
    """
    encrypted = _fernet.encrypt(plain_text.encode('utf-8'))
    return encrypted.decode('utf-8')


def decrypt_string(encrypted_text: str) -> str:
    """
    Decrypt a Fernet-encrypted string.

    Args:
        encrypted_text: Base64-encoded encrypted string

    Returns:
        Decrypted plain text string
    """
    decrypted = _fernet.decrypt(encrypted_text.encode('utf-8'))
    return decrypted.decode('utf-8')
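A round trip through the two helpers, assuming this runs from `backend/src` (without `ENCRYPTION_KEY` in `.env` a fresh key is generated and printed, so ciphertexts will not survive restarts):

from encryption import encrypt_string, decrypt_string

token = encrypt_string("hanchu-api-password")
assert token != "hanchu-api-password"                  # ciphertext is opaque
assert decrypt_string(token) == "hanchu-api-password"  # and reversible with the same key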
406
v2/backend/src/main.py
Normal file
406
v2/backend/src/main.py
Normal file
@@ -0,0 +1,406 @@
from fastapi import FastAPI, HTTPException, Depends
from pydantic import BaseModel

from service.hanchu_service import HanchuESSService
from service.auth_service import get_auth_service
from database.db import StationDB
from auth import create_access_token, create_refresh_token, decode_access_token, get_current_user

app = FastAPI(title="HanchuESS Solar Backend API v2")

# Initialize services
station_db = StationDB()
auth_service = get_auth_service()


class RegisterRequest(BaseModel):
    """Request model for user registration."""
    username: str
    password: str
    hanchu_username: str
    hanchu_password: str


class LoginRequest(BaseModel):
    """Request model for user login."""
    username: str
    password: str


class DecryptRequest(BaseModel):
    """Request model for decrypting a payload."""
    encrypted_payload: str


class RefreshTokenRequest(BaseModel):
    """Request model for refreshing access token."""
    refresh_token: str


@app.get("/", tags=["Root"])
def root():
    return {"message": "Welcome to HanchuESS Solar Backend API v2"}


@app.get("/health", tags=["Root"])
def health_check():
    return {"status": "healthy"}


@app.post("/auth/register", tags=["Authentication"])
def register(request: RegisterRequest):
    """
    Register a new user with their HanchuESS credentials.

    Args:
        request: Contains username, password, and HanchuESS API credentials

    Returns:
        Success message
    """
    try:
        user_id = auth_service.register_user(
            username=request.username,
            password=request.password,
            hanchu_username=request.hanchu_username,
            hanchu_password=request.hanchu_password
        )

        return {
            "success": True,
            "message": "User registered successfully",
            "user_id": user_id
        }
    except ValueError as e:
        raise HTTPException(status_code=400, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/auth/login", tags=["Authentication"])
def login(request: LoginRequest):
    """
    Login and receive a JWT access token.

    Args:
        request: Contains username and password

    Returns:
        JWT access token
    """
    try:
        # Authenticate user
        user = auth_service.authenticate_user(request.username, request.password)
        if not user:
            raise HTTPException(
                status_code=401,
                detail="Incorrect username or password"
            )

        # Create access token
        access_token = create_access_token(data={"sub": user["username"]})

        # Create refresh token
        refresh_token = create_refresh_token(data={"sub": user["username"]})

        # Store refresh token in database
        auth_service.store_refresh_token(refresh_token, user["username"])

        return {
            "success": True,
            "access_token": access_token,
            "refresh_token": refresh_token,
            "token_type": "bearer",
            "expires_in": 900  # 15 minutes in seconds
        }
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/auth/refresh", tags=["Authentication"])
def refresh_access_token(request: RefreshTokenRequest):
    """
    Get a new access token using a refresh token.

    Args:
        request: Contains refresh_token

    Returns:
        New access token
    """
    try:
        # Validate refresh token
        username = auth_service.validate_refresh_token(request.refresh_token)
        if not username:
            raise HTTPException(
                status_code=401,
                detail="Invalid or expired refresh token"
            )

        # Create new access token
        access_token = create_access_token(data={"sub": username})

        return {
            "success": True,
            "access_token": access_token,
            "token_type": "bearer",
            "expires_in": 900  # 15 minutes in seconds
        }
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/auth/logout", tags=["Authentication"])
def logout(request: RefreshTokenRequest, current_user: dict = Depends(get_current_user)):
    """
    Logout by revoking the refresh token.

    Args:
        request: Contains refresh_token to revoke
        current_user: Current authenticated user

    Returns:
        Success message
    """
    try:
        # Revoke the refresh token
        revoked = auth_service.revoke_refresh_token(request.refresh_token)

        return {
            "success": True,
            "message": "Logged out successfully" if revoked else "Token already revoked"
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/auth/logout-all", tags=["Authentication"])
def logout_all(current_user: dict = Depends(get_current_user)):
    """
    Logout from all devices by revoking all refresh tokens.

    Args:
        current_user: Current authenticated user

    Returns:
        Success message with count of revoked tokens
    """
    try:
        # Revoke all refresh tokens for the user
        count = auth_service.revoke_all_user_tokens(current_user["username"])

        return {
            "success": True,
            "message": f"Logged out from {count} device(s)"
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


def get_user_service(current_user: dict = Depends(get_current_user)) -> HanchuESSService:
    """
    Get HanchuESS service instance configured for the current user.

    Args:
        current_user: Current authenticated user from JWT token

    Returns:
        HanchuESSService instance with user's credentials
    """
    # Get user's HanchuESS credentials from auth service
    credentials = auth_service.get_user_hanchu_credentials(current_user["username"])
    if not credentials:
        raise HTTPException(status_code=401, detail="User not found")

    # Create service instance with user's credentials
    return HanchuESSService(
        hanchu_username=credentials["hanchu_username"],
        hanchu_password=credentials["hanchu_password"]
    )


@app.get("/hanchu/stations/list", tags=["HanchuESS API"])
def query_stations_from_api(
    current: int = 1,
    size: int = 100,
    hanchu_service: HanchuESSService = Depends(get_user_service)
):
    """
    Query list of stations from HanchuESS API.

    Args:
        current: Page number (default 1)
        size: Page size (default 100)

    Returns:
        Paginated list of stations with their IDs and names
    """
    try:
        result = hanchu_service.query_station_list(current=current, size=size)

        return {
            "success": True,
            "data": result
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/utils/decrypt", tags=["Utilities"])
def decrypt_payload(
    request: DecryptRequest,
    hanchu_service: HanchuESSService = Depends(get_user_service)
):
    """
    Decrypt an AES-encrypted HanchuESS payload.

    Args:
        request: Contains encrypted_payload (base64-encoded encrypted string)

    Returns:
        Decrypted data and its type
    """
    try:
        decrypted_data = hanchu_service.decrypt_payload(request.encrypted_payload)

        return {
            "success": True,
            "decrypted_data": decrypted_data,
            "data_type": type(decrypted_data).__name__
        }
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Decryption failed: {str(e)}")


@app.post("/db/stations/sync", tags=["Database"])
def sync_stations(
    hanchu_service: HanchuESSService = Depends(get_user_service),
    current_user: dict = Depends(get_current_user)
):
    """
    Fetch stations from the HanchuESS API (first page, up to 100) and store them in the database.

    Returns:
        Summary of synced stations
    """
    try:
        # Get list of stations from API
        station_list = hanchu_service.query_station_list(current=1, size=100)
        records = station_list.get('records', [])

        if not records:
            return {
                "success": True,
                "message": "No stations found",
                "synced": 0,
                "failed": 0,
                "stations": []
            }

        synced = []
        failed = []

        # Fetch and store each station
        for record in records:
            station_id = record.get('stationId')
            if not station_id:
                continue

            try:
                # Fetch full station info
                station_data = hanchu_service.get_station_info(station_id)

                # Check if we got real data
                has_real_data = any(
                    key in station_data and station_data[key] not in [None, [], "", {}]
                    for key in ['stationName', 'pcsList', 'bmsList', 'coordinates', 'ownerName']
                )

                if has_real_data:
                    # Store in database with username
                    station_db.upsert_station_data(station_data, current_user["username"])
                    synced.append({
                        "stationId": station_id,
                        "stationName": station_data.get('stationName')
                    })
                else:
                    failed.append({
                        "stationId": station_id,
                        "reason": "No data available"
                    })

            except Exception as e:
                failed.append({
                    "stationId": station_id,
                    "reason": str(e)
                })

        return {
            "success": True,
            "message": f"Synced {len(synced)} stations, {len(failed)} failed",
            "synced": len(synced),
            "failed": len(failed),
            "stations": synced,
            "errors": failed
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.get("/db/stations/{station_id}", tags=["Database"])
def get_station_from_db(
    station_id: str,
    current_user: dict = Depends(get_current_user)
):
    """
    Get station data from database.

    Args:
        station_id: Station ID to retrieve

    Returns:
        Station data with all devices
    """
    try:
        station = station_db.get_station_data(station_id, current_user["username"])

        if not station:
            raise HTTPException(status_code=404, detail="Station not found or access denied")

        return {
            "success": True,
            "data": station
        }
    except HTTPException:
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.get("/db/stations", tags=["Database"])
def list_stations_from_db(current_user: dict = Depends(get_current_user)):
    """
    List all stations from local database for the current user.

    Returns:
        List of user's stations
    """
    try:
        stations = station_db.list_stations(current_user["username"])

        return {
            "success": True,
            "count": len(stations),
            "data": stations
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


if __name__ == "__main__":
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8050)
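For reference, a hypothetical client walkthrough of the auth flow against a locally running instance (the base URL and credentials are assumptions):

import requests

BASE = "http://localhost:8050"  # assumption: backend started via docker compose

requests.post(f"{BASE}/auth/register", json={
    "username": "alice",
    "password": "s3cret",
    "hanchu_username": "alice@example.com",
    "hanchu_password": "hanchu-pw",
}).raise_for_status()

tokens = requests.post(f"{BASE}/auth/login", json={
    "username": "alice",
    "password": "s3cret",
}).json()

# Access token goes in the Authorization header; keep the refresh token for /auth/refresh
headers = {"Authorization": f"Bearer {tokens['access_token']}"}
stations = requests.get(f"{BASE}/db/stations", headers=headers).json()
print(stations["count"], "stations in the local database")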
1
v2/backend/src/service/__init__.py
Normal file
1
v2/backend/src/service/__init__.py
Normal file
@@ -0,0 +1 @@
"""Service layer for HanchuESS API integration."""
177
v2/backend/src/service/auth_service.py
Normal file
177
v2/backend/src/service/auth_service.py
Normal file
@@ -0,0 +1,177 @@
"""
Authentication service for user management.
"""
import bcrypt
from datetime import datetime, timedelta
from typing import Optional
from database.db import UserDB
from encryption import encrypt_string, decrypt_string


class AuthService:
    """Service for user authentication operations."""

    def __init__(self):
        """Initialize authentication service with user database."""
        self.user_db = UserDB()

    def hash_password(self, password: str) -> str:
        """Hash a password using bcrypt."""
        # Bcrypt has a 72-byte limit; truncate the UTF-8 bytes (not characters)
        password_bytes = password.encode('utf-8')
        if len(password_bytes) > 72:
            password_bytes = password_bytes[:72]

        salt = bcrypt.gensalt()
        hashed = bcrypt.hashpw(password_bytes, salt)

        return hashed.decode('utf-8')

    def verify_password(self, plain_password: str, hashed_password: str) -> bool:
        """Verify a password against a hash."""
        # Bcrypt has a 72-byte limit; truncate bytes the same way as hash_password
        password_bytes = plain_password.encode('utf-8')
        if len(password_bytes) > 72:
            password_bytes = password_bytes[:72]

        hashed_bytes = hashed_password.encode('utf-8')

        return bcrypt.checkpw(password_bytes, hashed_bytes)

    def register_user(self, username: str, password: str,
                      hanchu_username: str, hanchu_password: str) -> int:
        """
        Register a new user.

        Args:
            username: User's login username
            password: User's login password (will be hashed)
            hanchu_username: HanchuESS API username
            hanchu_password: HanchuESS API password (will be encrypted)

        Returns:
            User ID

        Raises:
            ValueError: If username already exists
        """
        # Check if username already exists
        existing_user = self.user_db.get_user_by_username(username)
        if existing_user:
            raise ValueError("Username already exists")

        # Hash the password
        hashed_password = self.hash_password(password)

        # Encrypt HanchuESS credentials before storing
        encrypted_hanchu_password = encrypt_string(hanchu_password)

        # Create user
        user_id = self.user_db.create_user(
            username=username,
            hashed_password=hashed_password,
            hanchu_username=hanchu_username,
            hanchu_password=encrypted_hanchu_password
        )

        return user_id

    def authenticate_user(self, username: str, password: str) -> Optional[dict]:
        """
        Authenticate a user with username and password.

        Args:
            username: User's login username
            password: User's login password

        Returns:
            User dictionary if authentication successful, None otherwise
        """
        # Get user from database
        user = self.user_db.get_user_by_username(username)
        if not user:
            return None

        # Verify password
        if not self.verify_password(password, user["hashed_password"]):
            return None

        return user

    def get_user_hanchu_credentials(self, username: str) -> Optional[dict]:
        """
        Get user's HanchuESS credentials.

        Args:
            username: User's login username

        Returns:
            Dictionary with hanchu_username and decrypted hanchu_password, or None if not found
        """
        user = self.user_db.get_user_by_username(username)
        if not user:
            return None

        # Decrypt the HanchuESS password
        decrypted_password = decrypt_string(user["hanchu_password"])

        return {
            "hanchu_username": user["hanchu_username"],
            "hanchu_password": decrypted_password
        }

    def store_refresh_token(self, token: str, username: str, expires_in_days: int = 7) -> int:
        """
        Store a refresh token.

        Args:
            token: Refresh token string
            username: Username
            expires_in_days: Days until expiration

        Returns:
            Token ID
        """
        expires_at = (datetime.now() + timedelta(days=expires_in_days)).isoformat()
        return self.user_db.store_refresh_token(token, username, expires_at)

    def validate_refresh_token(self, token: str) -> Optional[str]:
        """
        Validate a refresh token and return the username if valid.

        Args:
            token: Refresh token string

        Returns:
            Username if valid, None otherwise
        """
        token_data = self.user_db.get_refresh_token(token)
        if not token_data:
            return None

        # Check if expired
        expires_at = datetime.fromisoformat(token_data["expires_at"])
        if datetime.now() > expires_at:
            return None

        return token_data["username"]

    def revoke_refresh_token(self, token: str) -> bool:
        """Revoke a refresh token."""
        return self.user_db.revoke_refresh_token(token)

    def revoke_all_user_tokens(self, username: str) -> int:
        """Revoke all refresh tokens for a user."""
        return self.user_db.revoke_all_user_tokens(username)


# Global service instance
_auth_service = None


def get_auth_service() -> AuthService:
    """Get or create the global auth service instance."""
    global _auth_service
    if _auth_service is None:
        _auth_service = AuthService()
    return _auth_service
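A sketch of the refresh-token lifecycle these methods implement (assumes `backend/src` on the import path and an already-registered user `alice`, since the token row references the users table):

from auth import create_refresh_token
from service.auth_service import get_auth_service

auth = get_auth_service()

token = create_refresh_token(data={"sub": "alice"})
auth.store_refresh_token(token, "alice")

assert auth.validate_refresh_token(token) == "alice"  # valid until expiry
auth.revoke_refresh_token(token)
assert auth.validate_refresh_token(token) is None     # revoked tokens stop validating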
315
v2/backend/src/service/hanchu_service.py
Normal file
315
v2/backend/src/service/hanchu_service.py
Normal file
@@ -0,0 +1,315 @@
|
||||
"""
|
||||
HanchuESS API service for authentication and API calls.
|
||||
"""
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
import requests
|
||||
|
||||
from Crypto.Cipher import AES, PKCS1_v1_5
|
||||
from Crypto.PublicKey import RSA
|
||||
from Crypto.Util.Padding import pad, unpad
|
||||
from dotenv import load_dotenv
|
||||
|
||||
|
||||
load_dotenv()
|
||||
|
||||
|
||||
class HanchuESSService:
|
||||
"""Service for interacting with HanchuESS solar API."""
|
||||
|
||||
# RSA public key from the JavaScript code
|
||||
RSA_PUBLIC_KEY = """-----BEGIN PUBLIC KEY-----
|
||||
MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVg7RFDLMGM4O98d1zWKI5RQan
|
||||
jci3iY4qlpgsH76fUn3GnZtqjbRk37lCQDv6AhgPNXRPpty81+g909/c4yzySKaP
|
||||
CcDZv7KdCRB1mVxkq+0z4EtKx9EoTXKnFSDBaYi2srdal1tM3gGOsNTDN58CzYPX
|
||||
nDGPX7+EHS1Mm4aVDQIDAQAB
|
||||
-----END PUBLIC KEY-----"""
|
||||
|
||||
def __init__(self, hanchu_username: str = None, hanchu_password: str = None):
|
||||
"""
|
||||
Initialize service with configuration from environment or parameters.
|
||||
|
||||
Args:
|
||||
hanchu_username: Optional HanchuESS username (overrides env var)
|
||||
hanchu_password: Optional HanchuESS password (overrides env var)
|
||||
"""
|
||||
self.name = "HanchuESS Service"
|
||||
|
||||
# Load config from environment
|
||||
self.aes_key = os.environ["HANCHU_AES_KEY"].encode("utf-8")
|
||||
self.aes_iv = os.environ["HANCHU_AES_IV"].encode("utf-8")
|
||||
self.login_url = os.environ["HANCHU_LOGIN_URL"]
|
||||
self.base_url = os.getenv("HANCHU_BASE_URL", "https://iess3.hanchuess.com/gateway/")
|
||||
self.login_type = os.getenv("HANCHU_LOGIN_TYPE", "ACCOUNT")
|
||||
self.timeout = int(os.getenv("HANCHU_HTTP_TIMEOUT", "10"))
|
||||
self.verify_ssl = os.getenv("HANCHU_VERIFY_SSL", "true").lower() == "true"
|
||||
|
||||
# Use provided credentials or fall back to environment
|
||||
self.hanchu_username = hanchu_username or os.getenv("HANCHU_USERNAME", "")
|
||||
self.hanchu_password = hanchu_password or os.getenv("HANCHU_PASSWORD", "")
|
||||
|
||||
# Cache for access token
|
||||
self._access_token = None
|
||||
|
||||
# Safety checks
|
||||
if len(self.aes_key) not in (16, 24, 32):
|
||||
raise ValueError("AES key must be 16, 24, or 32 bytes")
|
||||
if len(self.aes_iv) != 16:
|
||||
raise ValueError("AES IV must be exactly 16 bytes")
|
||||
|
||||
def encrypt_payload(self, data: dict | str) -> str:
|
||||
"""
|
||||
Encrypt payload using AES-CBC and return base64 string.
|
||||
|
||||
Args:
|
||||
data: Dictionary or string to encrypt
|
||||
|
||||
Returns:
|
||||
Base64-encoded encrypted payload
|
||||
"""
|
||||
if not isinstance(data, str):
|
||||
data = json.dumps(data, separators=(",", ":"))
|
||||
|
||||
cipher = AES.new(self.aes_key, AES.MODE_CBC, self.aes_iv)
|
||||
ciphertext = cipher.encrypt(pad(data.encode("utf-8"), AES.block_size))
|
||||
return base64.b64encode(ciphertext).decode("utf-8")
|
||||
|
||||
def decrypt_payload(self, encrypted_data: str) -> dict | str:
|
||||
"""
|
||||
Decrypt base64-encoded AES-CBC payload and return the original data.
|
||||
|
||||
Args:
|
||||
encrypted_data: Base64-encoded encrypted string
|
||||
|
||||
Returns:
|
||||
Decrypted data (dict if valid JSON, string otherwise)
|
||||
"""
|
||||
ciphertext = base64.b64decode(encrypted_data)
|
||||
cipher = AES.new(self.aes_key, AES.MODE_CBC, self.aes_iv)
|
||||
decrypted = unpad(cipher.decrypt(ciphertext), AES.block_size)
|
||||
decrypted_str = decrypted.decode("utf-8")
|
||||
|
||||
# Try to parse as JSON, return string if it fails
|
||||
try:
|
||||
return json.loads(decrypted_str)
|
||||
except json.JSONDecodeError:
|
||||
return decrypted_str
|
||||
|
||||
def encrypt_password_rsa(self, password: str) -> str:
|
||||
"""
|
||||
Encrypt password using RSA public key (matches JavaScript GO function).
|
||||
|
||||
Args:
|
||||
password: Plain text password
|
||||
|
||||
Returns:
|
||||
Base64-encoded encrypted password
|
||||
"""
|
||||
public_key = RSA.import_key(self.RSA_PUBLIC_KEY)
|
||||
cipher = PKCS1_v1_5.new(public_key)
|
||||
encrypted = cipher.encrypt(password.encode('utf-8'))
|
||||
return base64.b64encode(encrypted).decode('utf-8')
|
||||
|
||||
def get_access_token(self) -> str:
|
||||
"""
|
||||
Authenticate with Hanchu ESS and return access token.
|
||||
Uses double encryption: RSA for password, then AES for entire payload.
|
||||
Caches the token to avoid unnecessary logins.
|
||||
|
||||
Returns:
|
||||
JWT access token
|
||||
"""
|
||||
# Return cached token if available
|
||||
if self._access_token:
|
||||
return self._access_token
|
||||
|
||||
# Step 1: RSA encrypt the password
|
||||
encrypted_password = self.encrypt_password_rsa(self.hanchu_password)
|
||||
|
||||
# Step 2: Build payload with encrypted password
|
||||
payload = {
|
||||
"account": self.hanchu_username,
|
||||
"pwd": encrypted_password,
|
||||
"loginType": self.login_type,
|
||||
}
|
||||
|
||||
# Step 3: AES encrypt the entire payload
|
||||
encrypted_payload = self.encrypt_payload(payload)
|
||||
|
||||
# Step 4: Send to API with correct headers
|
||||
headers = {
|
||||
"Content-Type": "text/plain",
|
||||
"Accept": "application/json, text/plain, */*",
|
||||
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
|
||||
"appplat": "iess",
|
||||
"locale": "en",
|
||||
"timezone": "Africa/Accra",
|
||||
"timeselected": "GMT",
|
||||
"version": "1.0",
|
||||
"crypto-version": "1.0.0",
|
||||
"Origin": "https://iess3.hanchuess.com",
|
||||
"Referer": "https://iess3.hanchuess.com/login",
|
||||
}
|
||||
|
||||
response = requests.post(
|
||||
self.login_url,
|
||||
data=encrypted_payload,
|
||||
headers=headers,
|
||||
timeout=self.timeout,
|
||||
verify=self.verify_ssl,
|
||||
)
|
||||
|
||||
response.raise_for_status()
|
||||
result = response.json()
|
||||
|
||||
try:
|
||||
# The token is directly in the 'data' field as a JWT string
|
||||
if result.get("success") and result.get("data"):
|
||||
self._access_token = result["data"]
|
||||
return self._access_token
|
||||
else:
|
||||
raise RuntimeError(
|
||||
f"Hanchu login failed: {json.dumps(result, ensure_ascii=False)}"
|
||||
)
|
||||
except (KeyError, TypeError):
|
||||
raise RuntimeError(
|
||||
f"Hanchu login failed: {json.dumps(result, ensure_ascii=False)}"
|
||||
)
|
||||
|
||||
    def query_station_list(self, current: int = 1, size: int = 100, access_token: str = None) -> dict:
        """
        Query the list of stations from the HanchuESS API.

        Args:
            current: Page number (default 1)
            size: Page size (default 100)
            access_token: Optional JWT token. If not provided, one is obtained automatically.

        Returns:
            Dictionary containing the station list with pagination info
        """
        # Get a token if not provided
        if not access_token:
            access_token = self.get_access_token()

        # Build and encrypt the payload
        payload = {
            "current": current,
            "size": size,
        }
        encrypted_payload = self.encrypt_payload(payload)

        # Build headers (same browser-like set as login, plus the token)
        headers = {
            "Content-Type": "text/plain",
            "Accept": "application/json, text/plain, */*",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
            "appplat": "iess",
            "locale": "en",
            "timezone": "Africa/Accra",
            "timeselected": "GMT",
            "version": "1.0",
            "crypto-version": "1.0.0",
            "access-token": access_token,
            "Origin": "https://iess3.hanchuess.com",
            "Referer": "https://iess3.hanchuess.com/",
        }

        # Make the request
        url = f"{self.base_url}platform/station/queryList"
        response = requests.post(
            url,
            data=encrypted_payload,
            headers=headers,
            timeout=self.timeout,
            verify=self.verify_ssl,
        )

        response.raise_for_status()
        result = response.json()

        if not result.get("success"):
            raise RuntimeError(
                f"Failed to query station list: {json.dumps(result, ensure_ascii=False)}"
            )

        return result.get("data", {})

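Because queryList is paginated, a caller that wants every station can advance current until a short page comes back. A minimal sketch, assuming the returned data dict carries the page rows under a records key (typical for this response shape, but not confirmed by this diff):

def iter_all_stations(service, size: int = 100):
    # Walk the paginated endpoint page by page and yield each station row.
    current = 1
    while True:
        page = service.query_station_list(current=current, size=size)
        records = page.get("records", [])
        yield from records
        if len(records) < size:  # a short page means we've reached the end
            return
        current += 1
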
    def get_station_info(self, station_id: str, access_token: str = None) -> dict:
        """
        Get station information from the HanchuESS API.

        Args:
            station_id: Station ID to query
            access_token: Optional JWT token. If not provided, one is obtained automatically.

        Returns:
            Station information dictionary
        """
        # Get a token if not provided
        if not access_token:
            access_token = self.get_access_token()

        # Build and encrypt the payload
        payload = {
            "stationId": station_id,
        }
        encrypted_payload = self.encrypt_payload(payload)

        # Build headers (same set as query_station_list)
        headers = {
            "Content-Type": "text/plain",
            "Accept": "application/json, text/plain, */*",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
            "appplat": "iess",
            "locale": "en",
            "timezone": "Africa/Accra",
            "timeselected": "GMT",
            "version": "1.0",
            "crypto-version": "1.0.0",
            "access-token": access_token,
            "Origin": "https://iess3.hanchuess.com",
            "Referer": "https://iess3.hanchuess.com/",
        }

        # Make the request
        url = f"{self.base_url}platform/homePage/stationInfo"
        response = requests.post(
            url,
            data=encrypted_payload,
            headers=headers,
            timeout=self.timeout,
            verify=self.verify_ssl,
        )

        response.raise_for_status()
        result = response.json()

        if not result.get("success"):
            raise RuntimeError(
                f"Failed to get station info: {json.dumps(result, ensure_ascii=False)}"
            )

        data = result.get("data", {})
        # Echo the station_id back into the data: it is in our request,
        # but the response does not always include it.
        if "stationId" not in data:
            data["stationId"] = station_id

        return data

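Combined with the pagination sketch above, per-station details can be fetched in one pass via the get_service() helper defined just below, assuming each station row exposes its ID under stationId (the same key this method echoes back):

service = get_service()
details = [
    service.get_station_info(station["stationId"])
    for station in iter_all_stations(service)
]
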
# Global service instance
_service_instance = None


def get_service() -> HanchuESSService:
    """Get or create the global service instance."""
    global _service_instance
    if _service_instance is None:
        _service_instance = HanchuESSService()
    return _service_instance

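A route handler would typically go through this singleton so the cached JWT is shared across requests. A minimal FastAPI sketch (illustrative wiring, not part of this diff):

from fastapi import FastAPI

app = FastAPI()

@app.get("/stations")
def list_stations(current: int = 1, size: int = 100):
    # Reuse the module-level service so the cached token survives between calls.
    return get_service().query_station_list(current=current, size=size)
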
3
v2/data/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
# This directory contains the SQLite database
# It is mounted into the Docker container as a volume
*.db
25
v2/docker-compose.yml
Normal file
@@ -0,0 +1,25 @@
services:
  hanchuess-solar-backend:
    build:
      context: backend
      dockerfile: docker/Dockerfile
    container_name: hanchuess-solar-backend
    ports:
      - "8050:8050"
    volumes:
      - sqlite_data:/data
    env_file:
      - .env
    environment:
      - HANCHU_AES_KEY=${HANCHU_AES_KEY}
      - HANCHU_AES_IV=${HANCHU_AES_IV}
      - HANCHU_LOGIN_URL=${HANCHU_LOGIN_URL}
      - HANCHU_LOGIN_TYPE=${HANCHU_LOGIN_TYPE}
      - HANCHU_HTTP_TIMEOUT=${HANCHU_HTTP_TIMEOUT}
      - HANCHU_VERIFY_SSL=${HANCHU_VERIFY_SSL}
      - HANCHU_USERNAME=${HANCHU_USERNAME}
      - HANCHU_PASSWORD=${HANCHU_PASSWORD}
      - SQLITE_DB_PATH=${SQLITE_DB_PATH}

volumes:
  sqlite_data:
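Note that the database only persists across container restarts if SQLITE_DB_PATH points inside the mounted volume, e.g. SQLITE_DB_PATH=/data/hanchuess_solar.db in .env (an example path; the compose file itself only passes the variable through).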