10 changes: 10 additions & 0 deletions apps/predbat/axle.py
@@ -377,11 +377,21 @@ def load_axle_slot(base, axle_sessions, export, rate_replicate={}):
        if export:
            base.rate_export[minute] = base.rate_export.get(minute, 0) + pence_per_kwh
            rate_replicate[minute] = "saving"

            # Track Axle override in rate store
            if base.rate_store:
                today = datetime.now()
                base.rate_store.update_auto_override(today, minute, None, base.rate_export[minute], "Axle")
        else:
            base.rate_import[minute] = base.rate_import.get(minute, 0) + pence_per_kwh
            base.load_scaling_dynamic[minute] = base.load_scaling_saving
            rate_replicate[minute] = "saving"

            # Track Axle override in rate store
            if base.rate_store:
                today = datetime.now()
                base.rate_store.update_auto_override(today, minute, base.rate_import[minute], None, "Axle")


def fetch_axle_active(base):
    """
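The same tracking pattern recurs in fetch.py and octopus.py below: every code path that mutates rate_import or rate_export also records the resulting value in rate_store. The rate_store API itself is not part of this diff; as a reading aid, here is a minimal stub consistent with the call sites, where None means "leave that direction unchanged" (the class and its field names are illustrative assumptions, not the real implementation):

# Stand-in for the rate_store API, inferred from the call sites in this PR only;
# the real class, its fields and its storage layout are not shown in the diff.
class RateStoreStub:
    def __init__(self):
        self.overrides = {}

    def update_auto_override(self, date, minute, import_rate, export_rate, source):
        # Record the post-override rate for one slot; None leaves that direction untouched
        key = (date.date().isoformat(), minute)
        entry = self.overrides.setdefault(key, {"import": None, "export": None, "source": None})
        if import_rate is not None:
            entry["import"] = import_rate
        if export_rate is not None:
            entry["export"] = export_rate
        entry["source"] = source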
14 changes: 14 additions & 0 deletions apps/predbat/config.py
@@ -826,6 +826,18 @@
        "type": "switch",
        "default": True,
    },
    {
        "name": "rate_retention_days",
        "friendly_name": "Rate Retention Days",
        "type": "input_number",
        "min": 1,
        "max": 365,
        "step": 1,
        "unit": "days",
        "icon": "mdi:database-clock",
        "enable": "expert_mode",
        "default": 7,
    },
    {
        "name": "set_charge_freeze",
        "friendly_name": "Set Charge Freeze",
@@ -1437,6 +1449,7 @@
    "days_previous": True,
    "days_previous_weight": True,
    "battery_scaling": True,
    "rate_retention_days": True,
    "forecast_hours": True,
    "import_export_scaling": True,
    "inverter_limit_charge": True,
@@ -2073,6 +2086,7 @@
    "rates_export_override": {"type": "dict_list"},
    "days_previous": {"type": "integer_list"},
    "days_previous_weight": {"type": "float_list"},
    "rate_retention_days": {"type": "integer"},
    "forecast_hours": {"type": "integer"},
    "notify_devices": {"type": "string_list"},
    "battery_scaling": {"type": "sensor_list", "sensor_type": "float", "entries": "num_inverters", "modify": False},
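rate_retention_days is a new expert-mode setting (1 to 365 days, default 7); the second hunk registers it in the existing flag table and the third types it as an integer for config validation. Where the value is consumed is not shown in this diff; presumably it feeds the retention_days argument of PersistentStore.cleanup() in persistent_store.py below. A hypothetical wiring, where the storage directory and glob pattern are invented for illustration:

# Hypothetical wiring only - the consuming code is not part of this diff
retention_days = self.get_arg("rate_retention_days", 7)
removed = self.rate_store.cleanup("/config/predbat_rates", "rates_*.json", retention_days)
if removed:
    self.log("Info: Removed {} expired rate files".format(removed))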
49 changes: 49 additions & 0 deletions apps/predbat/fetch.py
@@ -957,6 +957,23 @@ def fetch_sensor_data(self, save=True):
        if self.rate_import:
            self.rate_scan(self.rate_import, print=False)
            self.rate_import, self.rate_import_replicated = self.rate_replicate(self.rate_import, self.io_adjusted, is_import=True)

            # Persist base import rates to storage (only non-replicated/non-override data)
            if self.rate_store:
                today = datetime.now()
                for minute in self.rate_import:
                    # Only persist true API data, not replicated or override data
                    if minute not in self.rate_import_replicated or self.rate_import_replicated[minute] == "got":
                        # Get corresponding export rate or use 0
                        export_rate = self.rate_export.get(minute, 0) if self.rate_export else 0
                        self.rate_store.write_base_rate(today, minute, self.rate_import[minute], export_rate)

                # Rehydrate finalized rates from storage - these take priority over fresh API data
                for minute in range(0, self.minutes_now):
                    finalized_rate = self.rate_store.get_rate(today, minute, is_import=True)
                    if finalized_rate is not None:
                        self.rate_import[minute] = finalized_rate

            self.rate_import_no_io = self.rate_import.copy()
            self.rate_import = self.rate_add_io_slots(self.rate_import, self.octopus_slots)
            self.load_saving_slot(self.octopus_saving_slots, export=False, rate_replicate=self.rate_import_replicated)
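The persistence filter above keeps a slot only when its rate_replicate marker is missing or "got", i.e. genuine API data, skipping slots whose rates were replicated forward or overridden. The condition is equivalent to a dict .get() with "got" as the default:

# Equivalent form of the persist condition used above
is_base_rate = self.rate_import_replicated.get(minute, "got") == "got"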
@@ -973,6 +990,23 @@ def fetch_sensor_data(self, save=True):
        if self.rate_export:
            self.rate_scan_export(self.rate_export, print=False)
            self.rate_export, self.rate_export_replicated = self.rate_replicate(self.rate_export, is_import=False)

            # Persist base export rates to storage (only non-replicated/non-override data)
            if self.rate_store:
                today = datetime.now()
                for minute in self.rate_export:
                    # Only persist true API data, not replicated or override data
                    if minute not in self.rate_export_replicated or self.rate_export_replicated[minute] == "got":
                        # Get corresponding import rate or use 0
                        import_rate = self.rate_import.get(minute, 0) if self.rate_import else 0
                        self.rate_store.write_base_rate(today, minute, import_rate, self.rate_export[minute])

                # Rehydrate finalized rates from storage - these take priority over fresh API data
                for minute in range(0, self.minutes_now):
                    finalized_rate = self.rate_store.get_rate(today, minute, is_import=False)
                    if finalized_rate is not None:
                        self.rate_export[minute] = finalized_rate

        # For export tariff only load the saving session if enabled
        if self.rate_export_max > 0:
            self.load_saving_slot(self.octopus_saving_slots, export=True, rate_replicate=self.rate_export_replicated)
@@ -988,6 +1022,13 @@ def fetch_sensor_data(self, save=True):
        if self.rate_import or self.rate_export:
            self.set_rate_thresholds()

        # Finalize past slots (5+ minutes past slot start)
        if self.rate_store:
            today = datetime.now()
            finalized = self.rate_store.finalize_slots(today, self.minutes_now)
            if finalized > 0:
                self.log("Finalized {} rate slots".format(finalized))

        # Find discharging windows
        if self.rate_export:
            self.high_export_rates, lowest, highest = self.rate_scan_window(self.rate_export, 5, self.rate_export_cost_threshold, True, alt_rates=self.rate_import)
@@ -1390,6 +1431,14 @@ def apply_manual_rates(self, rates, manual_items, is_import=True, rate_replicate
                rates[minute] = rate
                rate_replicate[minute] = "manual"

                # Track manual override in rate store
                if self.rate_store:
                    today = datetime.now()
                    if is_import:
                        self.rate_store.update_manual_override(today, minute, rate, None)
                    else:
                        self.rate_store.update_manual_override(today, minute, None, rate)

        return rates

    def basic_rates(self, info, rtype, prev=None, rate_replicate={}):
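Note fetch.py's finalize step: finalize_slots() is expected to lock in slots whose start time is at least 5 minutes in the past (per the comment in the third hunk) and return how many it finalized; its implementation is not in this diff. A plausible sketch under that reading, with is_finalized and mark_finalized as hypothetical helpers:

# Sketch only - finalize_slots is not defined in this diff; 30-minute slots and
# the 5-minute grace period are assumptions taken from the comment above.
def finalize_slots(self, today, minutes_now, slot_length=30, grace=5):
    finalized = 0
    for slot_start in range(0, max(0, minutes_now - grace) + 1, slot_length):
        if not self.is_finalized(today, slot_start):  # hypothetical helper
            self.mark_finalized(today, slot_start)    # hypothetical helper
            finalized += 1
    return finalized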
20 changes: 20 additions & 0 deletions apps/predbat/octopus.py
@@ -1947,12 +1947,22 @@ def load_saving_slot(self, octopus_saving_slots, export=False, rate_replicate={}
            if minute in self.rate_export:
                self.rate_export[minute] += rate
                rate_replicate[minute] = "saving"

                # Track saving session override in rate store
                if self.rate_store:
                    today = datetime.now()
                    self.rate_store.update_auto_override(today, minute, None, self.rate_export[minute], "Saving")
            else:
                if minute in self.rate_import:
                    self.rate_import[minute] += rate
                    self.load_scaling_dynamic[minute] = self.load_scaling_saving
                    rate_replicate[minute] = "saving"

                    # Track saving session override in rate store
                    if self.rate_store:
                        today = datetime.now()
                        self.rate_store.update_auto_override(today, minute, self.rate_import[minute], None, "Saving")

    def decode_octopus_slot(self, slot, raw=False):
        """
        Decode IOG slot
@@ -2149,10 +2159,20 @@ def rate_add_io_slots(self, rates, octopus_slots):
                slots_added_set.add(slot_start)
                rates[minute] = assumed_price

                # Track IOG override in rate store
                if self.rate_store:
                    today = datetime.now()
                    self.rate_store.update_auto_override(today, minute, assumed_price, None, "IOG")

            else:
                # For minutes within a 30-min slot, only apply if the slot was added
                if slot_start in slots_added_set:
                    rates[minute] = assumed_price

                    # Track IOG override in rate store
                    if self.rate_store:
                        today = datetime.now()
                        self.rate_store.update_auto_override(today, minute, assumed_price, None, "IOG")
        else:
            assumed_price = self.rate_import.get(start_minutes, self.rate_min)
189 changes: 189 additions & 0 deletions apps/predbat/persistent_store.py
@@ -0,0 +1,189 @@
# -----------------------------------------------------------------------------
# Predbat Home Battery System
# Copyright Trefor Southwell 2026 - All Rights Reserved
# This application may be used for personal use only and not for commercial use
# -----------------------------------------------------------------------------
# fmt: off
# pylint: disable=consider-using-f-string
# pylint: disable=line-too-long
# pylint: disable=attribute-defined-outside-init

"""
Base class for persistent JSON file storage with backup and cleanup.
Provides common functionality for components needing to store state across restarts.
"""

import json
import os
import shutil
from datetime import datetime, timedelta
from pathlib import Path


class PersistentStore:
    """
    Abstract base class for persistent JSON file storage.
    Handles load/save with backup, cleanup of old files, and automatic timestamping.
    """

    def __init__(self, base):
        """Initialize with reference to base PredBat instance"""
        self.base = base
        self.log = base.log

    def load(self, filepath):
        """
        Load data from JSON file with automatic backup restoration on corruption.

        Args:
            filepath: Path to JSON file to load

        Returns:
            Loaded data dict or None if file doesn't exist or is corrupted
        """
        try:
            if not os.path.exists(filepath):
                return None

            with open(filepath, 'r') as f:
                data = json.load(f)
            return data

        except (json.JSONDecodeError, IOError) as e:
            self.log(f"Warn: Failed to load {filepath}: {e}")

            # Try to restore from backup
            backup_path = filepath + '.bak'
            if os.path.exists(backup_path):
                try:
                    self.log(f"Warn: Attempting to restore from backup: {backup_path}")
                    with open(backup_path, 'r') as f:
                        data = json.load(f)
                    self.log("Warn: Successfully restored from backup")
                    return data
                except (json.JSONDecodeError, IOError) as e2:
                    self.log(f"Error: Backup restoration failed: {e2}")

            return None

    def save(self, filepath, data, backup=True):
        """
        Save data to JSON file with automatic backup and timestamp.

        Args:
            filepath: Path to JSON file to save
            data: Dict to save (will add last_updated timestamp)
            backup: Whether to backup existing file before overwrite

        Returns:
            True if successful, False otherwise
        """
        try:
            # Add timestamp
            data['last_updated'] = datetime.now().astimezone().isoformat()

            # Create directory if needed
            os.makedirs(os.path.dirname(filepath), exist_ok=True)

            # Backup existing file if requested
            if backup and os.path.exists(filepath):
                self.backup_file(filepath)

            # Write new file
            with open(filepath, 'w') as f:
                json.dump(data, f, indent=2)

            # Cleanup old backups
            self.cleanup_backups(filepath)

            return True

        except (IOError, OSError) as e:
            self.log(f"Error: Failed to save {filepath}: {e}")
            return False

    def backup_file(self, filepath):
        """
        Create backup copy of file.

        Args:
            filepath: Path to file to backup
        """
        try:
            backup_path = filepath + '.bak'
            if os.path.exists(filepath):
                shutil.copy2(filepath, backup_path)
        except (IOError, OSError) as e:
            self.log(f"Warn: Failed to backup {filepath}: {e}")

    def cleanup_backups(self, filepath):
        """
        Remove backup files older than 1 day.

        Args:
            filepath: Path to main file (will check for .bak file)
        """
        try:
            backup_path = filepath + '.bak'
            if os.path.exists(backup_path):
                # Check file age
                file_time = datetime.fromtimestamp(os.path.getmtime(backup_path))
                age = datetime.now() - file_time

                if age > timedelta(days=1):
                    os.remove(backup_path)
                    self.log(f"Info: Cleaned up old backup: {backup_path}")

        except (IOError, OSError) as e:
            self.log(f"Warn: Failed to cleanup backup for {filepath}: {e}")

    def cleanup(self, directory, pattern, retention_days):
        """
        Remove files matching pattern older than retention period.

        Args:
            directory: Directory to search
            pattern: Glob pattern for files to cleanup
            retention_days: Number of days to retain files

        Returns:
            Number of files removed
        """
        try:
            if not os.path.exists(directory):
                return 0

            path = Path(directory)
            cutoff_time = datetime.now() - timedelta(days=retention_days)
            removed_count = 0

            for file_path in path.glob(pattern):
                try:
                    file_time = datetime.fromtimestamp(file_path.stat().st_mtime)
                    if file_time < cutoff_time:
                        file_path.unlink()
                        removed_count += 1
                        self.log(f"Info: Cleaned up old file: {file_path}")
                except (IOError, OSError) as e:
                    self.log(f"Warn: Failed to remove {file_path}: {e}")

            return removed_count

        except Exception as e:
            self.log(f"Error: Cleanup failed for {directory}/{pattern}: {e}")
            return 0

    def get_last_updated(self, filepath):
        """
        Get last_updated timestamp from JSON file.

        Args:
            filepath: Path to JSON file

        Returns:
            ISO 8601 timestamp string or None
        """
        data = self.load(filepath)
        if data and 'last_updated' in data:
            return data['last_updated']
        return None
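PersistentStore is deliberately abstract: it only supplies load/save/backup/cleanup, and concrete stores (such as the rate store used elsewhere in this PR) are expected to subclass it and choose the file layout. A minimal usage sketch, where MyDailyStore and its one-file-per-day naming are invented for illustration:

import os

class MyDailyStore(PersistentStore):
    """Illustrative subclass only - one JSON file per day under a fixed directory"""

    def __init__(self, base, directory):
        super().__init__(base)
        self.directory = directory

    def path_for(self, day):
        return os.path.join(self.directory, "state_{}.json".format(day.strftime("%Y-%m-%d")))

    def write(self, day, payload):
        data = self.load(self.path_for(day)) or {}
        data.update(payload)
        return self.save(self.path_for(day), data)  # save() stamps last_updated and keeps a .bak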