-
-
Notifications
You must be signed in to change notification settings - Fork 123
Expand file tree
/
Copy pathpersistent_store.py
More file actions
189 lines (152 loc) · 6.03 KB
/
persistent_store.py
File metadata and controls
189 lines (152 loc) · 6.03 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
# -----------------------------------------------------------------------------
# Predbat Home Battery System
# Copyright Trefor Southwell 2026 - All Rights Reserved
# This application may be used for personal use only and not for commercial use
# -----------------------------------------------------------------------------
# fmt: off
# pylint: disable=consider-using-f-string
# pylint: disable=line-too-long
# pylint: disable=attribute-defined-outside-init
"""
Base class for persistent JSON file storage with backup and cleanup.
Provides common functionality for components needing to store state across restarts.
"""
import json
import os
import shutil
from datetime import datetime, timedelta
from pathlib import Path
class PersistentStore:
    """
    Abstract base class for persistent JSON file storage.

    Handles load/save with backup, cleanup of old files, and automatic
    timestamping. Subclasses supply file paths; this class supplies the
    durable read/write machinery so state survives restarts.
    """

    # Backups older than this are removed by cleanup_backups()
    BACKUP_MAX_AGE = timedelta(days=1)

    def __init__(self, base):
        """Initialize with reference to base PredBat instance"""
        self.base = base
        self.log = base.log

    def load(self, filepath):
        """
        Load data from JSON file with automatic backup restoration on corruption.

        Args:
            filepath: Path to JSON file to load

        Returns:
            Loaded data dict, or None if the file doesn't exist or both the
            main file and its .bak backup are corrupted/unreadable
        """
        try:
            if not os.path.exists(filepath):
                return None
            with open(filepath, 'r') as f:
                data = json.load(f)
            return data
        except (json.JSONDecodeError, IOError) as e:
            self.log(f"Warn: Failed to load {filepath}: {e}")
            # Main file is corrupt or unreadable - fall back to the .bak copy
            backup_path = filepath + '.bak'
            if os.path.exists(backup_path):
                try:
                    self.log(f"Warn: Attempting to restore from backup: {backup_path}")
                    with open(backup_path, 'r') as f:
                        data = json.load(f)
                    self.log("Warn: Successfully restored from backup")
                    return data
                except (json.JSONDecodeError, IOError) as e2:
                    self.log(f"Error: Backup restoration failed: {e2}")
            return None

    def save(self, filepath, data, backup=True):
        """
        Save data to JSON file with automatic backup and timestamp.

        Args:
            filepath: Path to JSON file to save
            data: Dict to save (NOTE: mutated in place - a 'last_updated'
                  ISO 8601 timestamp key is added)
            backup: Whether to backup existing file before overwrite

        Returns:
            True if successful, False otherwise
        """
        try:
            # Stamp the payload so get_last_updated() can report freshness
            data['last_updated'] = datetime.now().astimezone().isoformat()

            # Create directory if needed; a bare filename has no directory
            # component and os.makedirs("") would raise, so guard for that
            directory = os.path.dirname(filepath)
            if directory:
                os.makedirs(directory, exist_ok=True)

            # Backup existing file if requested
            if backup and os.path.exists(filepath):
                self.backup_file(filepath)

            # Write new file
            with open(filepath, 'w') as f:
                json.dump(data, f, indent=2)

            # Cleanup old backups
            self.cleanup_backups(filepath)
            return True
        except (IOError, OSError) as e:
            self.log(f"Error: Failed to save {filepath}: {e}")
            return False

    def backup_file(self, filepath):
        """
        Create backup copy of file (filepath + '.bak').

        Best-effort: failure is logged but never raised, so a broken backup
        does not block the subsequent save.

        Args:
            filepath: Path to file to backup
        """
        try:
            backup_path = filepath + '.bak'
            if os.path.exists(filepath):
                # copy2 preserves mtime, which cleanup_backups relies on
                shutil.copy2(filepath, backup_path)
        except (IOError, OSError) as e:
            self.log(f"Warn: Failed to backup {filepath}: {e}")

    def cleanup_backups(self, filepath):
        """
        Remove the .bak backup file if it is older than BACKUP_MAX_AGE (1 day).

        Args:
            filepath: Path to main file (will check for .bak file)
        """
        try:
            backup_path = filepath + '.bak'
            if os.path.exists(backup_path):
                # Age is derived from the file's modification time
                file_time = datetime.fromtimestamp(os.path.getmtime(backup_path))
                age = datetime.now() - file_time
                if age > self.BACKUP_MAX_AGE:
                    os.remove(backup_path)
                    self.log(f"Info: Cleaned up old backup: {backup_path}")
        except (IOError, OSError) as e:
            self.log(f"Warn: Failed to cleanup backup for {filepath}: {e}")

    def cleanup(self, directory, pattern, retention_days):
        """
        Remove files matching pattern older than retention period.

        Args:
            directory: Directory to search
            pattern: Glob pattern for files to cleanup
            retention_days: Number of days to retain files

        Returns:
            Number of files removed (0 if the directory doesn't exist or
            the cleanup failed entirely)
        """
        try:
            if not os.path.exists(directory):
                return 0

            path = Path(directory)
            cutoff_time = datetime.now() - timedelta(days=retention_days)
            removed_count = 0

            for file_path in path.glob(pattern):
                # Per-file try: one undeletable file must not abort the sweep
                try:
                    file_time = datetime.fromtimestamp(file_path.stat().st_mtime)
                    if file_time < cutoff_time:
                        file_path.unlink()
                        removed_count += 1
                        self.log(f"Info: Cleaned up old file: {file_path}")
                except (IOError, OSError) as e:
                    self.log(f"Warn: Failed to remove {file_path}: {e}")

            return removed_count
        except Exception as e:
            # Broad catch is deliberate: cleanup is best-effort housekeeping
            self.log(f"Error: Cleanup failed for {directory}/{pattern}: {e}")
            return 0

    def get_last_updated(self, filepath):
        """
        Get last_updated timestamp from JSON file.

        Args:
            filepath: Path to JSON file

        Returns:
            ISO 8601 timestamp string or None
        """
        data = self.load(filepath)
        if data and 'last_updated' in data:
            return data['last_updated']
        return None