diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 3b1a32fb2..db0989a14 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -29,9 +29,7 @@ body:
import numpy as np
import flixopt as fx
- fx.CONFIG.Logging.console = True
- fx.CONFIG.Logging.level = 'DEBUG'
- fx.CONFIG.apply()
+ fx.CONFIG.Logging.enable_console('DEBUG')
flow_system = fx.FlowSystem(pd.date_range('2020-01-01', periods=3, freq='h'))
flow_system.add_elements(
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b5e54c233..68aaa8a60 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -72,7 +72,6 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp
### 📦 Dependencies
### 📝 Docs
-- Added missing examples to docs.
### 👷 Development
@@ -80,6 +79,100 @@ If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOp
---
+## [4.1.0] - 2025-11-21
+
+**Summary**: Logging migrated from loguru to standard Python logging for stability and security. Simpler API with convenient presets.
+
+!!! info "Migration Required?"
+ **Most users**: No action needed (silent by default). Methods like `CONFIG.exploring()`, `CONFIG.debug()`, etc. continue to work exactly as before.
+ **If you customized logging**: Simple API update (see migration below).
+ **If you used loguru directly**: Breaking change (loguru only in v3.6.0-v4.0.0, ~4 days).
+
+If upgrading from v2.x, see the [v3.0.0 release notes](https://github.com/flixOpt/flixOpt/releases/tag/v3.0.0) and [Migration Guide](https://flixopt.github.io/flixopt/latest/user-guide/migration-guide-v3/).
+
+### ✨ Added
+
+**New logging presets**:
+```python
+CONFIG.production('app.log') # File-only, no console/plots
+```
+
+**New logging methods**:
+- `CONFIG.Logging.enable_console(level, colored, stream)` - Console output with colors
+- `CONFIG.Logging.enable_file(level, path, max_bytes, backup_count)` - File logging with rotation
+- `CONFIG.Logging.disable()` - Disable all logging
+- `CONFIG.Logging.set_colors(log_colors)` - Customize colors
+
+**Enhanced formatting**:
+- Multi-line messages with box borders (┌─, │, └─)
+- Exception tracebacks with proper indentation
+- Timestamps: `2025-11-21 14:30:45.123`
+
+### 💥 Breaking Changes
+
+**Logging migration** (edge cases only):
+
+| Old (v3.6.0-v4.0.0) | New (v4.1.0+) |
+|---------------------|---------------|
+| `CONFIG.Logging.level = 'INFO'`<br>`CONFIG.Logging.console = True`<br>`CONFIG.apply()` | `CONFIG.Logging.enable_console('INFO')`<br>or `CONFIG.exploring()` |
+| `CONFIG.Logging.file = 'app.log'` | `CONFIG.Logging.enable_file('INFO', 'app.log')` |
+| `logger.opt(lazy=True)` | Built-in (automatic) |
+
+**Migration**:
+```python
+# Before (v3.6.0-v4.0.0)
+CONFIG.Logging.level = 'INFO'
+CONFIG.Logging.console = True
+CONFIG.apply()
+
+# After (v4.1.0+)
+CONFIG.Logging.enable_console('INFO') # or CONFIG.exploring()
+```
+
+### ♻️ Changed
+
+- Replaced loguru with Python `logging` + optional `colorlog` for colors
+- Configuration immediate (no `CONFIG.apply()` needed)
+- Log format: `[dimmed timestamp] [colored level] │ message`
+- Logs to `stdout` by default (configurable)
+- SUCCESS level preserved (green, level 25)
+- Performance: Expensive operations guarded with `logger.isEnabledFor()` checks
+
+### 🗑️ Deprecated
+
+- `change_logging_level(level)` → Use `CONFIG.Logging.enable_console(level)`. Removal in v5.0.0.
+
+### 🔥 Removed
+
+**CONFIG methods/attributes**:
+- `CONFIG.apply()` → Use helper methods directly
+- `CONFIG.Logging.level`, `.console`, `.file` → Use `enable_console()`/`enable_file()`
+- `CONFIG.Logging.verbose_tracebacks`, `.rich`, `.Colors`, `.date_format`, `.format`, `.console_width`, `.show_path`, `.show_logger_name` → Use standard logging
+- loguru features (`logger.opt()`, etc.)
+
+### 🐛 Fixed
+
+- `TypeError` in `check_bounds()` with loguru-style formatting
+- Exception tracebacks not appearing in custom formatters
+- Inconsistent formatting between console and file logs
+
+### 🔒 Security
+
+- Removed loguru dependency for reduced supply chain risk
+
+### 📦 Dependencies
+
+- **Removed:** `loguru >= 0.7.0`
+- **Added:** `colorlog >= 6.8.0, < 7` (optional)
+
+### 📝 Docs
+
+- Preset comparison table in `CONFIG.Logging` docstring
+- Color customization examples
+- Migration guide with before/after code
+
+---
+
Until here -->
## [4.0.0] - 2025-11-19
diff --git a/docs/getting-started.md b/docs/getting-started.md
index 5841de3a4..cd558ce79 100644
--- a/docs/getting-started.md
+++ b/docs/getting-started.md
@@ -24,20 +24,25 @@ pip install "flixopt[full]"
## Logging
-FlixOpt uses [loguru](https://loguru.readthedocs.io/) for logging. Logging is silent by default but can be easily configured. For beginners, use our internal convenience methods. Experts can use loguru directly.
+FlixOpt uses Python's standard logging module with optional colored output via [colorlog](https://github.com/borntyping/python-colorlog). Logging is silent by default but can be easily configured.
```python
from flixopt import CONFIG
-# Enable console logging
-CONFIG.Logging.console = True
-CONFIG.Logging.level = 'INFO'
-CONFIG.apply()
+# Enable colored console logging
+CONFIG.Logging.enable_console('INFO')
# Or use a preset configuration for exploring
CONFIG.exploring()
```
+For advanced logging configuration, you can use Python's standard logging module directly:
+
+```python
+import logging
+logging.basicConfig(level=logging.DEBUG)
+```
+
For more details on logging configuration, see the [`CONFIG.Logging`][flixopt.config.CONFIG.Logging] documentation.
## Basic Workflow
diff --git a/docs/user-guide/migration-guide-v3.md b/docs/user-guide/migration-guide-v3.md
index 2a9cab97a..4c7959e8f 100644
--- a/docs/user-guide/migration-guide-v3.md
+++ b/docs/user-guide/migration-guide-v3.md
@@ -89,12 +89,12 @@ Terminology changed and sharing system inverted: effects now "pull" shares.
### Other Changes
-| Category | Old (v2.x) | New (v3.0.0) |
-|----------|------------|--------------|
-| System model class | `SystemModel` | `FlowSystemModel` |
-| Element submodel | `Model` | `Submodel` |
-| Logging default | Enabled | Disabled |
-| Enable logging | (default) | `fx.CONFIG.Logging.console = True; fx.CONFIG.apply()` |
+| Category | Old (v2.x) | New (v3.0.0+) |
+|------------------------|------------|---------------|
+| System model class | `SystemModel` | `FlowSystemModel` |
+| Element submodel | `Model` | `Submodel` |
+| Logging default | Enabled | Disabled (silent) |
+| Enable console logging | (default) | `fx.CONFIG.Logging.enable_console('INFO')` or `fx.CONFIG.exploring()` |
---
diff --git a/examples/05_Two-stage-optimization/two_stage_optimization.py b/examples/05_Two-stage-optimization/two_stage_optimization.py
index 6c7b20276..b61af3b2a 100644
--- a/examples/05_Two-stage-optimization/two_stage_optimization.py
+++ b/examples/05_Two-stage-optimization/two_stage_optimization.py
@@ -7,15 +7,17 @@
While the final optimum might differ from the global optimum, the solving will be much faster.
"""
+import logging
import pathlib
import timeit
import pandas as pd
import xarray as xr
-from loguru import logger
import flixopt as fx
+logger = logging.getLogger('flixopt')
+
if __name__ == '__main__':
fx.CONFIG.exploring()
diff --git a/flixopt/__init__.py b/flixopt/__init__.py
index 6f0dbfe5d..17b3fdc1a 100644
--- a/flixopt/__init__.py
+++ b/flixopt/__init__.py
@@ -2,6 +2,7 @@
This module bundles all common functionality of flixopt and sets up the logging
"""
+import logging
import warnings
from importlib.metadata import PackageNotFoundError, version
@@ -60,6 +61,11 @@
'solvers',
]
+# Initialize logger with default configuration (silent: WARNING level, NullHandler)
+logger = logging.getLogger('flixopt')
+logger.setLevel(logging.WARNING)
+logger.addHandler(logging.NullHandler())
+
# === Runtime warning suppression for third-party libraries ===
# These warnings are from dependencies and cannot be fixed by end users.
# They are suppressed at runtime to provide a cleaner user experience.
diff --git a/flixopt/aggregation.py b/flixopt/aggregation.py
index 99b13bd45..adaed3e42 100644
--- a/flixopt/aggregation.py
+++ b/flixopt/aggregation.py
@@ -6,12 +6,12 @@
from __future__ import annotations
import copy
+import logging
import pathlib
import timeit
from typing import TYPE_CHECKING
import numpy as np
-from loguru import logger
try:
import tsam.timeseriesaggregation as tsam
@@ -37,6 +37,8 @@
from .elements import Component
from .flow_system import FlowSystem
+logger = logging.getLogger('flixopt')
+
class Aggregation:
"""
@@ -104,7 +106,8 @@ def cluster(self) -> None:
self.aggregated_data = self.tsam.predictOriginalData()
self.clustering_duration_seconds = timeit.default_timer() - start_time # Zeit messen:
- logger.opt(lazy=True).info('{result}', result=lambda: self.describe_clusters())
+ if logger.isEnabledFor(logging.INFO):
+ logger.info(self.describe_clusters())
def describe_clusters(self) -> str:
description = {}
diff --git a/flixopt/calculation.py b/flixopt/calculation.py
index 2977f5a02..ee6742c22 100644
--- a/flixopt/calculation.py
+++ b/flixopt/calculation.py
@@ -10,6 +10,7 @@
from __future__ import annotations
+import logging
import math
import pathlib
import sys
@@ -19,7 +20,6 @@
from typing import TYPE_CHECKING, Annotated, Any
import numpy as np
-from loguru import logger
from tqdm import tqdm
from . import io as fx_io
@@ -39,6 +39,8 @@
from .solvers import _Solver
from .structure import FlowSystemModel
+logger = logging.getLogger('flixopt')
+
class Calculation:
"""
@@ -255,11 +257,9 @@ def solve(
# Log the formatted output
should_log = log_main_results if log_main_results is not None else CONFIG.Solving.log_main_results
- if should_log:
- logger.opt(lazy=True).info(
- '{result}',
- result=lambda: f'{" Main Results ":#^80}\n'
- + fx_io.format_yaml_string(self.main_results, compact_numeric_lists=True),
+ if should_log and logger.isEnabledFor(logging.INFO):
+ logger.info(
+ f'{" Main Results ":#^80}\n' + fx_io.format_yaml_string(self.main_results, compact_numeric_lists=True)
)
self.results = CalculationResults.from_calculation(self)
diff --git a/flixopt/color_processing.py b/flixopt/color_processing.py
index 9d874e027..2959acc82 100644
--- a/flixopt/color_processing.py
+++ b/flixopt/color_processing.py
@@ -6,12 +6,15 @@
from __future__ import annotations
+import logging
+
import matplotlib.colors as mcolors
import matplotlib.pyplot as plt
import plotly.express as px
-from loguru import logger
from plotly.exceptions import PlotlyError
+logger = logging.getLogger('flixopt')
+
def _rgb_string_to_hex(color: str) -> str:
"""Convert Plotly RGB/RGBA string format to hex.
diff --git a/flixopt/components.py b/flixopt/components.py
index cf6cb4082..a7f8b6314 100644
--- a/flixopt/components.py
+++ b/flixopt/components.py
@@ -4,20 +4,21 @@
from __future__ import annotations
+import logging
import warnings
from typing import TYPE_CHECKING, Literal
import numpy as np
import xarray as xr
-from loguru import logger
from . import io as fx_io
+from .config import DEPRECATION_REMOVAL_VERSION
from .core import PlausibilityError
from .elements import Component, ComponentModel, Flow
from .features import InvestmentModel, PiecewiseModel
from .interface import InvestParameters, OnOffParameters, PiecewiseConversion
from .modeling import BoundingPatterns
-from .structure import DEPRECATION_REMOVAL_VERSION, FlowSystemModel, register_class_for_io
+from .structure import FlowSystemModel, register_class_for_io
if TYPE_CHECKING:
import linopy
@@ -25,6 +26,8 @@
from .flow_system import FlowSystem
from .types import Numeric_PS, Numeric_TPS
+logger = logging.getLogger('flixopt')
+
@register_class_for_io
class LinearConverter(Component):
diff --git a/flixopt/config.py b/flixopt/config.py
index 07d7e24a9..824f80b75 100644
--- a/flixopt/config.py
+++ b/flixopt/config.py
@@ -1,31 +1,142 @@
from __future__ import annotations
+import logging
import os
-import sys
import warnings
+from logging.handlers import RotatingFileHandler
from pathlib import Path
from types import MappingProxyType
from typing import Literal
-from loguru import logger
+try:
+ import colorlog
+ from colorlog.escape_codes import escape_codes
-__all__ = ['CONFIG', 'change_logging_level']
+ COLORLOG_AVAILABLE = True
+except ImportError:
+ COLORLOG_AVAILABLE = False
+ escape_codes = None
+
+__all__ = ['CONFIG', 'change_logging_level', 'MultilineFormatter']
+
+if COLORLOG_AVAILABLE:
+ __all__.append('ColoredMultilineFormatter')
+
+# Add custom SUCCESS level (between INFO and WARNING)
+SUCCESS_LEVEL = 25
+logging.addLevelName(SUCCESS_LEVEL, 'SUCCESS')
+
+# Deprecation removal version - update this when planning the next major version
+DEPRECATION_REMOVAL_VERSION = '5.0.0'
+
+
+def _success(self, message, *args, **kwargs):
+ """Log a message with severity 'SUCCESS'."""
+ if self.isEnabledFor(SUCCESS_LEVEL):
+ self._log(SUCCESS_LEVEL, message, args, **kwargs)
+
+
+# Add success() method to Logger class
+logging.Logger.success = _success
+
+
+class MultilineFormatter(logging.Formatter):
+ """Custom formatter that handles multi-line messages with box-style borders."""
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ # Set default format with time
+ if not self._fmt:
+ self._fmt = '%(asctime)s %(levelname)-8s │ %(message)s'
+ self._style = logging.PercentStyle(self._fmt)
+
+ def format(self, record):
+ """Format multi-line messages with box-style borders for better readability."""
+ # Split into lines
+ lines = record.getMessage().split('\n')
+
+ # Add exception info if present (critical for logger.exception())
+ if record.exc_info:
+ lines.extend(self.formatException(record.exc_info).split('\n'))
+ if record.stack_info:
+ lines.extend(record.stack_info.rstrip().split('\n'))
+
+ # Format time with date and milliseconds (YYYY-MM-DD HH:MM:SS.mmm)
+ # formatTime doesn't support %f, so use datetime directly
+ import datetime
+
+ dt = datetime.datetime.fromtimestamp(record.created)
+ time_str = dt.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
+
+ # Single line - return standard format
+ if len(lines) == 1:
+ level_str = f'{record.levelname: <8}'
+ return f'{time_str} {level_str} │ {lines[0]}'
+
+ # Multi-line - use box format
+ level_str = f'{record.levelname: <8}'
+ result = f'{time_str} {level_str} │ ┌─ {lines[0]}'
+ indent = ' ' * 23 # 23 spaces for time with date (YYYY-MM-DD HH:MM:SS.mmm)
+ for line in lines[1:-1]:
+ result += f'\n{indent} {" " * 8} │ │ {line}'
+ result += f'\n{indent} {" " * 8} │ └─ {lines[-1]}'
+
+ return result
+
+
+if COLORLOG_AVAILABLE:
+
+ class ColoredMultilineFormatter(colorlog.ColoredFormatter):
+ """Colored formatter with multi-line message support."""
+
+ def format(self, record):
+ """Format multi-line messages with colors and box-style borders."""
+ # Split into lines
+ lines = record.getMessage().split('\n')
+
+ # Add exception info if present (critical for logger.exception())
+ if record.exc_info:
+ lines.extend(self.formatException(record.exc_info).split('\n'))
+ if record.stack_info:
+ lines.extend(record.stack_info.rstrip().split('\n'))
+
+ # Format time with date and milliseconds (YYYY-MM-DD HH:MM:SS.mmm)
+ import datetime
+
+ # Use thin attribute for timestamp
+ dim = escape_codes['thin']
+ reset = escape_codes['reset']
+ # formatTime doesn't support %f, so use datetime directly
+ dt = datetime.datetime.fromtimestamp(record.created)
+ time_str = dt.strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
+ time_formatted = f'{dim}{time_str}{reset}'
+
+ # Get the color for this level
+ log_colors = self.log_colors
+ level_name = record.levelname
+ color_name = log_colors.get(level_name, '')
+ color = escape_codes.get(color_name, '')
+
+ level_str = f'{level_name: <8}'
+
+ # Single line - return standard colored format
+ if len(lines) == 1:
+ return f'{time_formatted} {color}{level_str}{reset} │ {lines[0]}'
+
+ # Multi-line - use box format with colors
+ result = f'{time_formatted} {color}{level_str}{reset} │ {color}┌─ {lines[0]}{reset}'
+ indent = ' ' * 23 # 23 spaces for time with date (YYYY-MM-DD HH:MM:SS.mmm)
+ for line in lines[1:-1]:
+ result += f'\n{dim}{indent}{reset} {" " * 8} │ {color}│ {line}{reset}'
+ result += f'\n{dim}{indent}{reset} {" " * 8} │ {color}└─ {lines[-1]}{reset}'
+
+ return result
# SINGLE SOURCE OF TRUTH - immutable to prevent accidental modification
_DEFAULTS = MappingProxyType(
{
'config_name': 'flixopt',
- 'logging': MappingProxyType(
- {
- 'level': 'INFO',
- 'file': None,
- 'console': False,
- 'max_file_size': 10_485_760, # 10MB
- 'backup_count': 5,
- 'verbose_tracebacks': False,
- }
- ),
'modeling': MappingProxyType(
{
'big': 10_000_000,
@@ -58,13 +169,8 @@
class CONFIG:
"""Configuration for flixopt library.
- Always call ``CONFIG.apply()`` after changes.
-
- Note:
- flixopt uses `loguru `_ for logging.
-
Attributes:
- Logging: Logging configuration.
+ Logging: Logging configuration (see CONFIG.Logging for details).
Modeling: Optimization modeling parameters.
Solving: Solver configuration and default parameters.
Plotting: Plotting configuration.
@@ -72,72 +178,290 @@ class CONFIG:
Examples:
```python
- CONFIG.Logging.console = True
- CONFIG.Logging.level = 'DEBUG'
- CONFIG.apply()
- ```
-
- Load from YAML file:
-
- ```yaml
- logging:
- level: DEBUG
- console: true
- file: app.log
- solving:
- mip_gap: 0.001
- time_limit_seconds: 600
+ # Quick logging setup
+ CONFIG.Logging.enable_console('INFO')
+
+ # Or use presets (affects logging, plotting, solver output)
+ CONFIG.exploring() # Interactive exploration
+ CONFIG.debug() # Troubleshooting
+ CONFIG.production() # Production deployment
+ CONFIG.silent() # No output
+
+ # Adjust other settings
+ CONFIG.Solving.mip_gap = 0.001
+ CONFIG.Plotting.default_dpi = 600
```
"""
class Logging:
- """Logging configuration.
-
- Silent by default. Enable via ``console=True`` or ``file='path'``.
-
- Attributes:
- level: Logging level (DEBUG, INFO, SUCCESS, WARNING, ERROR, CRITICAL).
- file: Log file path for file logging (None to disable).
- console: Enable console output (True/'stdout' or 'stderr').
- max_file_size: Max file size in bytes before rotation.
- backup_count: Number of backup files to keep.
- verbose_tracebacks: Show detailed tracebacks with variable values.
+ """Logging configuration helpers.
+
+ flixopt is silent by default (WARNING level, no handlers).
+
+ Quick Start - Use Presets:
+ These presets configure logging along with plotting and solver output:
+
+ | Preset | Console Logs | File Logs | Plots | Solver Output | Use Case |
+ |--------|-------------|-----------|-------|---------------|----------|
+ | ``CONFIG.exploring()`` | INFO (colored) | No | Browser | Yes | Interactive exploration |
+ | ``CONFIG.debug()`` | DEBUG (colored) | No | Default | Yes | Troubleshooting |
+ | ``CONFIG.production('app.log')`` | No | INFO | No | No | Production deployments |
+ | ``CONFIG.silent()`` | No | No | No | No | Silent operation |
+
+ Examples:
+ ```python
+ CONFIG.exploring() # Start exploring interactively
+ CONFIG.debug() # See everything for troubleshooting
+ CONFIG.production('logs/prod.log') # Production mode
+ ```
+
+ Direct Control - Logging Only:
+ For fine-grained control of logging without affecting other settings:
+
+ Methods:
+ - ``enable_console(level='INFO', colored=True, stream=None)``
+ - ``enable_file(level='INFO', path='flixopt.log', max_bytes=10MB, backup_count=5)``
+ - ``disable()`` - Remove all handlers
+ - ``set_colors(log_colors)`` - Customize level colors
+
+ Examples:
+ ```python
+ # Console and file logging
+ CONFIG.Logging.enable_console('INFO')
+ CONFIG.Logging.enable_file('DEBUG', 'debug.log')
+
+ # Customize colors
+ CONFIG.Logging.set_colors(
+ {
+ 'INFO': 'bold_white',
+ 'SUCCESS': 'bold_green,bg_black',
+ 'CRITICAL': 'bold_white,bg_red',
+ }
+ )
+
+ # Non-colored output
+ CONFIG.Logging.enable_console('INFO', colored=False)
+ ```
+
+ Advanced Customization:
+ For full control, use Python's standard logging or create custom formatters:
- Examples:
```python
- # Enable console logging
- CONFIG.Logging.console = True
- CONFIG.Logging.level = 'DEBUG'
- CONFIG.apply()
-
- # File logging with rotation
- CONFIG.Logging.file = 'app.log'
- CONFIG.Logging.max_file_size = 5_242_880 # 5MB
- CONFIG.apply()
-
- # Console to stderr
- CONFIG.Logging.console = 'stderr'
- CONFIG.apply()
- ```
+ # Custom formatter
+ from flixopt.config import ColoredMultilineFormatter
+ import colorlog, logging
- Note:
- For advanced formatting or custom loguru configuration,
- use loguru's API directly after calling CONFIG.apply():
+ handler = colorlog.StreamHandler()
+ handler.setFormatter(ColoredMultilineFormatter(...))
+ logging.getLogger('flixopt').addHandler(handler)
- ```python
- from loguru import logger
+ # Or standard Python logging
+ import logging
- CONFIG.apply() # Basic setup
- logger.add('custom.log', format='{time} {message}')
+ logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
```
+
+ Note:
+ Default formatters (MultilineFormatter and ColoredMultilineFormatter)
+ provide pretty output with box borders for multi-line messages.
"""
- level: Literal['DEBUG', 'INFO', 'SUCCESS', 'WARNING', 'ERROR', 'CRITICAL'] = _DEFAULTS['logging']['level']
- file: str | None = _DEFAULTS['logging']['file']
- console: bool | Literal['stdout', 'stderr'] = _DEFAULTS['logging']['console']
- max_file_size: int = _DEFAULTS['logging']['max_file_size']
- backup_count: int = _DEFAULTS['logging']['backup_count']
- verbose_tracebacks: bool = _DEFAULTS['logging']['verbose_tracebacks']
+ @classmethod
+ def enable_console(cls, level: str | int = 'INFO', colored: bool = True, stream=None) -> None:
+ """Enable colored console logging.
+
+ Args:
+ level: Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL or logging constant)
+ colored: Use colored output if colorlog is available (default: True)
+ stream: Output stream (default: sys.stdout). Can be sys.stdout or sys.stderr.
+
+ Note:
+ For full control over formatting, use logging.basicConfig() instead.
+
+ Examples:
+ ```python
+ # Colored output to stdout (default)
+ CONFIG.Logging.enable_console('INFO')
+
+ # Plain text output
+ CONFIG.Logging.enable_console('INFO', colored=False)
+
+ # Log to stderr instead
+ import sys
+
+ CONFIG.Logging.enable_console('INFO', stream=sys.stderr)
+
+ # Using logging constants
+ import logging
+
+ CONFIG.Logging.enable_console(logging.DEBUG)
+ ```
+ """
+ import sys
+
+ logger = logging.getLogger('flixopt')
+
+ # Convert string level to logging constant
+ if isinstance(level, str):
+ level = getattr(logging, level.upper())
+
+ logger.setLevel(level)
+
+ # Default to stdout
+ if stream is None:
+ stream = sys.stdout
+
+ # Remove existing console handlers to avoid duplicates
+ logger.handlers = [
+ h
+ for h in logger.handlers
+ if not isinstance(h, logging.StreamHandler) or isinstance(h, RotatingFileHandler)
+ ]
+
+ if colored and COLORLOG_AVAILABLE:
+ handler = colorlog.StreamHandler(stream)
+ handler.setFormatter(
+ ColoredMultilineFormatter(
+ '%(log_color)s%(levelname)-8s%(reset)s %(message)s',
+ log_colors={
+ 'DEBUG': 'cyan',
+ 'INFO': '', # No color - use default terminal color
+ 'SUCCESS': 'green',
+ 'WARNING': 'yellow',
+ 'ERROR': 'red',
+ 'CRITICAL': 'bold_red',
+ },
+ )
+ )
+ else:
+ handler = logging.StreamHandler(stream)
+ handler.setFormatter(MultilineFormatter('%(levelname)-8s %(message)s'))
+
+ logger.addHandler(handler)
+ logger.propagate = False # Don't propagate to root
+
+ @classmethod
+ def enable_file(
+ cls,
+ level: str | int = 'INFO',
+ path: str | Path = 'flixopt.log',
+ max_bytes: int = 10 * 1024 * 1024,
+ backup_count: int = 5,
+ ) -> None:
+ """Enable file logging with rotation. Removes all existing file handlers!
+
+ Args:
+ level: Log level (DEBUG, INFO, WARNING, ERROR, CRITICAL or logging constant)
+ path: Path to log file (default: 'flixopt.log')
+ max_bytes: Maximum file size before rotation in bytes (default: 10MB)
+ backup_count: Number of backup files to keep (default: 5)
+
+ Note:
+ For full control over formatting and handlers, use logging module directly.
+
+ Examples:
+ ```python
+ # Basic file logging
+ CONFIG.Logging.enable_file('INFO', 'app.log')
+
+ # With custom rotation
+ CONFIG.Logging.enable_file('DEBUG', 'debug.log', max_bytes=50 * 1024 * 1024, backup_count=10)
+ ```
+ """
+ logger = logging.getLogger('flixopt')
+
+ # Convert string level to logging constant
+ if isinstance(level, str):
+ level = getattr(logging, level.upper())
+
+ logger.setLevel(level)
+
+ # Remove existing file handlers to avoid duplicates, keep all non-file handlers (including custom handlers)
+ logger.handlers = [
+ h for h in logger.handlers if not isinstance(h, (logging.FileHandler, RotatingFileHandler))
+ ]
+
+ # Create log directory if needed
+ log_path = Path(path)
+ log_path.parent.mkdir(parents=True, exist_ok=True)
+
+ handler = RotatingFileHandler(path, maxBytes=max_bytes, backupCount=backup_count)
+ handler.setFormatter(MultilineFormatter())
+
+ logger.addHandler(handler)
+ logger.propagate = False # Don't propagate to root
+
+ @classmethod
+ def disable(cls) -> None:
+ """Disable all flixopt logging.
+
+ Examples:
+ ```python
+ CONFIG.Logging.disable()
+ ```
+ """
+ logger = logging.getLogger('flixopt')
+ logger.handlers.clear()
+ logger.setLevel(logging.CRITICAL)
+
+ @classmethod
+ def set_colors(cls, log_colors: dict[str, str]) -> None:
+ """Customize log level colors for console output.
+
+ This updates the colors for the current console handler.
+ If no console handler exists, this does nothing.
+
+ Args:
+ log_colors: Dictionary mapping log levels to color names.
+ Colors can be comma-separated for multiple attributes
+ (e.g., 'bold_red,bg_white').
+
+ Available colors:
+ - Basic: black, red, green, yellow, blue, purple, cyan, white
+ - Bold: bold_red, bold_green, bold_yellow, bold_blue, etc.
+ - Light: light_red, light_green, light_yellow, light_blue, etc.
+ - Backgrounds: bg_red, bg_green, bg_light_red, etc.
+ - Combined: 'bold_white,bg_red' for white text on red background
+
+ Examples:
+ ```python
+ # Enable console first
+ CONFIG.Logging.enable_console('INFO')
+
+ # Then customize colors
+ CONFIG.Logging.set_colors(
+ {
+ 'DEBUG': 'cyan',
+ 'INFO': 'bold_white',
+ 'SUCCESS': 'bold_green',
+ 'WARNING': 'bold_yellow,bg_black', # Yellow on black
+ 'ERROR': 'bold_red',
+ 'CRITICAL': 'bold_white,bg_red', # White on red
+ }
+ )
+ ```
+
+ Note:
+ Requires colorlog to be installed. Has no effect on file handlers.
+ """
+ if not COLORLOG_AVAILABLE:
+ warnings.warn('colorlog is not installed. Colors cannot be customized.', stacklevel=2)
+ return
+
+ logger = logging.getLogger('flixopt')
+
+ # Find and update ColoredMultilineFormatter
+ for handler in logger.handlers:
+ if isinstance(handler, logging.StreamHandler):
+ formatter = handler.formatter
+ if isinstance(formatter, ColoredMultilineFormatter):
+ formatter.log_colors = log_colors
+ return
+
+ warnings.warn(
+ 'No ColoredMultilineFormatter found. Call CONFIG.Logging.enable_console() with colored=True first.',
+ stacklevel=2,
+ )
class Modeling:
"""Optimization modeling parameters.
@@ -167,7 +491,6 @@ class Solving:
CONFIG.Solving.mip_gap = 0.001
CONFIG.Solving.time_limit_seconds = 600
CONFIG.Solving.log_to_console = False
- CONFIG.apply()
```
"""
@@ -193,15 +516,10 @@ class Plotting:
Examples:
```python
- # Set consistent theming
- CONFIG.Plotting.plotly_template = 'plotly_dark'
- CONFIG.apply()
-
# Configure default export and color settings
CONFIG.Plotting.default_dpi = 600
CONFIG.Plotting.default_sequential_colorscale = 'plasma'
CONFIG.Plotting.default_qualitative_colorscale = 'Dark24'
- CONFIG.apply()
```
"""
@@ -215,11 +533,20 @@ class Plotting:
config_name: str = _DEFAULTS['config_name']
@classmethod
- def reset(cls):
- """Reset all configuration values to defaults."""
- for key, value in _DEFAULTS['logging'].items():
- setattr(cls.Logging, key, value)
+ def reset(cls) -> None:
+ """Reset all configuration values to defaults.
+ This resets modeling, solving, and plotting settings to their default values,
+ and disables all logging handlers (back to silent mode).
+
+ Examples:
+ ```python
+ CONFIG.debug() # Enable debug mode
+ # ... do some work ...
+ CONFIG.reset() # Back to defaults (silent)
+ ```
+ """
+ # Reset settings
for key, value in _DEFAULTS['modeling'].items():
setattr(cls.Modeling, key, value)
@@ -230,78 +557,9 @@ def reset(cls):
setattr(cls.Plotting, key, value)
cls.config_name = _DEFAULTS['config_name']
- cls.apply()
-
- @classmethod
- def apply(cls):
- """Apply current configuration to logging system."""
- valid_levels = ['DEBUG', 'INFO', 'SUCCESS', 'WARNING', 'ERROR', 'CRITICAL']
- if cls.Logging.level.upper() not in valid_levels:
- raise ValueError(f"Invalid log level '{cls.Logging.level}'. Must be one of: {', '.join(valid_levels)}")
-
- if cls.Logging.max_file_size <= 0:
- raise ValueError('max_file_size must be positive')
-
- if cls.Logging.backup_count < 0:
- raise ValueError('backup_count must be non-negative')
-
- if cls.Logging.console not in (False, True, 'stdout', 'stderr'):
- raise ValueError(f"console must be False, True, 'stdout', or 'stderr', got {cls.Logging.console}")
-
- _setup_logging(
- default_level=cls.Logging.level,
- log_file=cls.Logging.file,
- console=cls.Logging.console,
- max_file_size=cls.Logging.max_file_size,
- backup_count=cls.Logging.backup_count,
- verbose_tracebacks=cls.Logging.verbose_tracebacks,
- )
-
- @classmethod
- def load_from_file(cls, config_file: str | Path):
- """Load configuration from YAML file and apply it.
- Args:
- config_file: Path to the YAML configuration file.
-
- Raises:
- FileNotFoundError: If the config file does not exist.
- """
- # Import here to avoid circular import
- from . import io as fx_io
-
- config_path = Path(config_file)
- if not config_path.exists():
- raise FileNotFoundError(f'Config file not found: {config_file}')
-
- config_dict = fx_io.load_yaml(config_path)
- cls._apply_config_dict(config_dict)
-
- cls.apply()
-
- @classmethod
- def _apply_config_dict(cls, config_dict: dict):
- """Apply configuration dictionary to class attributes.
-
- Args:
- config_dict: Dictionary containing configuration values.
- """
- for key, value in config_dict.items():
- if key == 'logging' and isinstance(value, dict):
- for nested_key, nested_value in value.items():
- if hasattr(cls.Logging, nested_key):
- setattr(cls.Logging, nested_key, nested_value)
- elif key == 'modeling' and isinstance(value, dict):
- for nested_key, nested_value in value.items():
- setattr(cls.Modeling, nested_key, nested_value)
- elif key == 'solving' and isinstance(value, dict):
- for nested_key, nested_value in value.items():
- setattr(cls.Solving, nested_key, nested_value)
- elif key == 'plotting' and isinstance(value, dict):
- for nested_key, nested_value in value.items():
- setattr(cls.Plotting, nested_key, nested_value)
- elif hasattr(cls, key):
- setattr(cls, key, value)
+ # Reset logging to default (silent)
+ cls.Logging.disable()
@classmethod
def to_dict(cls) -> dict:
@@ -312,14 +570,6 @@ def to_dict(cls) -> dict:
"""
return {
'config_name': cls.config_name,
- 'logging': {
- 'level': cls.Logging.level,
- 'file': cls.Logging.file,
- 'console': cls.Logging.console,
- 'max_file_size': cls.Logging.max_file_size,
- 'backup_count': cls.Logging.backup_count,
- 'verbose_tracebacks': cls.Logging.verbose_tracebacks,
- },
'modeling': {
'big': cls.Modeling.big,
'epsilon': cls.Modeling.epsilon,
@@ -345,45 +595,83 @@ def to_dict(cls) -> dict:
def silent(cls) -> type[CONFIG]:
"""Configure for silent operation.
- Disables console logging, solver output, and result logging
- for clean production runs. Does not show plots. Automatically calls apply().
+ Disables all logging, solver output, and result logging
+ for clean production runs. Does not show plots.
+
+ Examples:
+ ```python
+ CONFIG.silent()
+ # Now run optimizations with no output
+ result = optimization.solve()
+ ```
"""
- cls.Logging.console = False
+ cls.Logging.disable()
cls.Plotting.default_show = False
- cls.Logging.file = None
cls.Solving.log_to_console = False
cls.Solving.log_main_results = False
- cls.apply()
return cls
@classmethod
def debug(cls) -> type[CONFIG]:
"""Configure for debug mode with verbose output.
- Enables console logging at DEBUG level, verbose tracebacks,
- and all solver output for troubleshooting. Automatically calls apply().
+ Enables console logging at DEBUG level and all solver output for troubleshooting.
+
+ Examples:
+ ```python
+ CONFIG.debug()
+ # See detailed DEBUG logs and full solver output
+ optimization.solve()
+ ```
"""
- cls.Logging.console = True
- cls.Logging.level = 'DEBUG'
- cls.Logging.verbose_tracebacks = True
+ cls.Logging.enable_console('DEBUG')
cls.Solving.log_to_console = True
cls.Solving.log_main_results = True
- cls.apply()
return cls
@classmethod
def exploring(cls) -> type[CONFIG]:
- """Configure for exploring flixopt
+ """Configure for exploring flixopt.
Enables console logging at INFO level and all solver output.
- Also enables browser plotting for plotly with showing plots per default
+ Also enables browser plotting for plotly with showing plots per default.
+
+ Examples:
+ ```python
+ CONFIG.exploring()
+ # Perfect for interactive sessions
+ optimization.solve() # Shows INFO logs and solver output
+ result.plot() # Opens plots in browser
+ ```
"""
- cls.Logging.console = True
- cls.Logging.level = 'INFO'
+ cls.Logging.enable_console('INFO')
cls.Solving.log_to_console = True
cls.Solving.log_main_results = True
cls.browser_plotting()
- cls.apply()
+ return cls
+
+ @classmethod
+ def production(cls, log_file: str | Path = 'flixopt.log') -> type[CONFIG]:
+ """Configure for production use.
+
+ Enables file logging only (no console output), disables plots,
+ and disables solver console output for clean production runs.
+
+ Args:
+ log_file: Path to log file (default: 'flixopt.log')
+
+ Examples:
+ ```python
+ CONFIG.production('production.log')
+ # Logs to file, no console output
+ optimization.solve()
+ ```
+ """
+ cls.Logging.disable() # Clear any console handlers
+ cls.Logging.enable_file('INFO', log_file)
+ cls.Plotting.default_show = False
+ cls.Solving.log_to_console = False
+ cls.Solving.log_main_results = False
return cls
@classmethod
@@ -394,9 +682,14 @@ def browser_plotting(cls) -> type[CONFIG]:
and viewing interactive plots. Does NOT modify CONFIG.Plotting settings.
Respects FLIXOPT_CI environment variable if set.
+
+ Examples:
+ ```python
+ CONFIG.browser_plotting()
+ result.plot() # Opens in browser instead of inline
+ ```
"""
cls.Plotting.default_show = True
- cls.apply()
# Only set to True if environment variable hasn't overridden it
if 'FLIXOPT_CI' not in os.environ:
@@ -407,129 +700,21 @@ def browser_plotting(cls) -> type[CONFIG]:
return cls
-def _format_multiline(record):
- """Format multi-line messages with box-style borders for better readability.
-
- Single-line messages use standard format.
- Multi-line messages use boxed format with ┌─, │, └─ characters.
-
- Note: Escapes curly braces in messages to prevent format string errors.
- """
- # Escape curly braces in message to prevent format string errors
- message = record['message'].replace('{', '{{').replace('}', '}}')
- lines = message.split('\n')
-
- # Format timestamp and level
- time_str = record['time'].strftime('%Y-%m-%d %H:%M:%S.%f')[:-3] # milliseconds
- level_str = f'{record["level"].name: <8}'
-
- # Single line messages - standard format
- if len(lines) == 1:
- result = f'{time_str} | {level_str} | {message}\n'
- if record['exception']:
- result += '{exception}'
- return result
-
- # Multi-line messages - boxed format
- indent = ' ' * len(time_str) # Match timestamp length
-
- # Build the boxed output
- result = f'{time_str} | {level_str} | ┌─ {lines[0]}\n'
- for line in lines[1:-1]:
- result += f'{indent} | {" " * 8} | │ {line}\n'
- result += f'{indent} | {" " * 8} | └─ {lines[-1]}\n'
-
- # Add exception info if present
- if record['exception']:
- result += '\n{exception}'
-
- return result
-
-
-def _setup_logging(
- default_level: Literal['DEBUG', 'INFO', 'SUCCESS', 'WARNING', 'ERROR', 'CRITICAL'] = 'INFO',
- log_file: str | None = None,
- console: bool | Literal['stdout', 'stderr'] = False,
- max_file_size: int = 10_485_760,
- backup_count: int = 5,
- verbose_tracebacks: bool = False,
-) -> None:
- """Internal function to setup logging - use CONFIG.apply() instead.
-
- Configures loguru logger with console and/or file handlers.
- Multi-line messages are automatically formatted with box-style borders.
-
- Args:
- default_level: Logging level for the logger.
- log_file: Path to log file (None to disable file logging).
- console: Enable console logging (True/'stdout' or 'stderr').
- max_file_size: Maximum log file size in bytes before rotation.
- backup_count: Number of backup log files to keep.
- verbose_tracebacks: If True, show detailed tracebacks with variable values.
- """
- # Remove all existing handlers
- logger.remove()
-
- # Console handler with multi-line formatting
- if console:
- stream = sys.stdout if console is True or console == 'stdout' else sys.stderr
- logger.add(
- stream,
- format=_format_multiline,
- level=default_level.upper(),
- colorize=True,
- backtrace=verbose_tracebacks,
- diagnose=verbose_tracebacks,
- enqueue=False,
- )
-
- # File handler with rotation (plain format for files)
- if log_file:
- log_path = Path(log_file)
- try:
- log_path.parent.mkdir(parents=True, exist_ok=True)
- except PermissionError as e:
- raise PermissionError(f"Cannot create log directory '{log_path.parent}': Permission denied") from e
-
- logger.add(
- log_file,
- format='{time:YYYY-MM-DD HH:mm:ss.SSS} | {level: <8} | {message}',
- level=default_level.upper(),
- colorize=False,
- rotation=max_file_size,
- retention=backup_count,
- encoding='utf-8',
- backtrace=verbose_tracebacks,
- diagnose=verbose_tracebacks,
- enqueue=False,
- )
-
-
-def change_logging_level(level_name: Literal['DEBUG', 'INFO', 'SUCCESS', 'WARNING', 'ERROR', 'CRITICAL']):
+def change_logging_level(level_name: str | int) -> None:
"""Change the logging level for the flixopt logger.
- .. deprecated:: 2.1.11
- Use ``CONFIG.Logging.level = level_name`` and ``CONFIG.apply()`` instead.
- This function will be removed in version 3.0.0.
-
Args:
- level_name: The logging level to set.
+ level_name: The logging level to set (DEBUG, INFO, WARNING, ERROR, CRITICAL or logging constant).
Examples:
>>> change_logging_level('DEBUG') # deprecated
>>> # Use this instead:
- >>> CONFIG.Logging.level = 'DEBUG'
- >>> CONFIG.apply()
+ >>> CONFIG.Logging.enable_console('DEBUG')
"""
warnings.warn(
- 'change_logging_level is deprecated and will be removed in version 3.0.0. '
- 'Use CONFIG.Logging.level = level_name and CONFIG.apply() instead.',
+ f'change_logging_level is deprecated and will be removed in version {DEPRECATION_REMOVAL_VERSION}. '
+ 'Use CONFIG.Logging.enable_console(level) instead.',
DeprecationWarning,
stacklevel=2,
)
- CONFIG.Logging.level = level_name.upper()
- CONFIG.apply()
-
-
-# Initialize default config
-CONFIG.apply()
+ CONFIG.Logging.enable_console(level_name)
diff --git a/flixopt/core.py b/flixopt/core.py
index d41af7e2e..71e389315 100644
--- a/flixopt/core.py
+++ b/flixopt/core.py
@@ -3,6 +3,7 @@
It provides Datatypes, logging functionality, and some functions to transform data structures.
"""
+import logging
import warnings
from itertools import permutations
from typing import Any, Literal, Union
@@ -10,16 +11,15 @@
import numpy as np
import pandas as pd
import xarray as xr
-from loguru import logger
+from .config import DEPRECATION_REMOVAL_VERSION
from .types import NumericOrBool
+logger = logging.getLogger('flixopt')
+
FlowSystemDimensions = Literal['time', 'period', 'scenario']
"""Possible dimensions of a FlowSystem."""
-# Deprecation removal version - update this when planning the next major version
-DEPRECATION_REMOVAL_VERSION = '5.0.0'
-
class PlausibilityError(Exception):
"""Error for a failing Plausibility check."""
diff --git a/flixopt/effects.py b/flixopt/effects.py
index 02181920a..43afcd0cf 100644
--- a/flixopt/effects.py
+++ b/flixopt/effects.py
@@ -7,6 +7,7 @@
from __future__ import annotations
+import logging
import warnings
from collections import deque
from typing import TYPE_CHECKING, Literal
@@ -14,7 +15,6 @@
import linopy
import numpy as np
import xarray as xr
-from loguru import logger
from .core import PlausibilityError
from .features import ShareAllocationModel
@@ -26,6 +26,8 @@
from .flow_system import FlowSystem
from .types import Effect_PS, Effect_TPS, Numeric_PS, Numeric_S, Numeric_TPS, Scalar
+logger = logging.getLogger('flixopt')
+
@register_class_for_io
class Effect(Element):
diff --git a/flixopt/elements.py b/flixopt/elements.py
index 2f63e8bdb..611b0bd9f 100644
--- a/flixopt/elements.py
+++ b/flixopt/elements.py
@@ -4,21 +4,20 @@
from __future__ import annotations
+import logging
import warnings
from typing import TYPE_CHECKING
import numpy as np
import xarray as xr
-from loguru import logger
from . import io as fx_io
-from .config import CONFIG
+from .config import CONFIG, DEPRECATION_REMOVAL_VERSION
from .core import PlausibilityError
from .features import InvestmentModel, OnOffModel
from .interface import InvestParameters, OnOffParameters
from .modeling import BoundingPatterns, ModelingPrimitives, ModelingUtilitiesAbstract
from .structure import (
- DEPRECATION_REMOVAL_VERSION,
Element,
ElementModel,
FlowSystemModel,
@@ -43,6 +42,8 @@
Scalar,
)
+logger = logging.getLogger('flixopt')
+
@register_class_for_io
class Component(Element):
diff --git a/flixopt/flow_system.py b/flixopt/flow_system.py
index f80f97dd3..63bb7b16d 100644
--- a/flixopt/flow_system.py
+++ b/flixopt/flow_system.py
@@ -4,6 +4,7 @@
from __future__ import annotations
+import logging
import warnings
from collections import defaultdict
from itertools import chain
@@ -12,7 +13,6 @@
import numpy as np
import pandas as pd
import xarray as xr
-from loguru import logger
from . import io as fx_io
from .config import CONFIG
@@ -34,6 +34,8 @@
from .types import Bool_TPS, Effect_TPS, Numeric_PS, Numeric_S, Numeric_TPS, NumericOrBool
+logger = logging.getLogger('flixopt')
+
class FlowSystem(Interface, CompositeContainerMixin[Element]):
"""
diff --git a/flixopt/interface.py b/flixopt/interface.py
index 55ac03b6b..852c3e8f8 100644
--- a/flixopt/interface.py
+++ b/flixopt/interface.py
@@ -5,16 +5,16 @@
from __future__ import annotations
+import logging
import warnings
from typing import TYPE_CHECKING, Any
import numpy as np
import pandas as pd
import xarray as xr
-from loguru import logger
-from .config import CONFIG
-from .structure import DEPRECATION_REMOVAL_VERSION, Interface, register_class_for_io
+from .config import CONFIG, DEPRECATION_REMOVAL_VERSION
+from .structure import Interface, register_class_for_io
if TYPE_CHECKING: # for type checking and preventing circular imports
from collections.abc import Iterator
@@ -22,6 +22,8 @@
from .flow_system import FlowSystem
from .types import Effect_PS, Effect_TPS, Numeric_PS, Numeric_TPS
+logger = logging.getLogger('flixopt')
+
@register_class_for_io
class Piece(Interface):
diff --git a/flixopt/io.py b/flixopt/io.py
index c8e4d0c3b..294822b7c 100644
--- a/flixopt/io.py
+++ b/flixopt/io.py
@@ -2,6 +2,7 @@
import inspect
import json
+import logging
import os
import pathlib
import re
@@ -14,13 +15,14 @@
import pandas as pd
import xarray as xr
import yaml
-from loguru import logger
if TYPE_CHECKING:
import linopy
from .types import Numeric_TPS
+logger = logging.getLogger('flixopt')
+
def remove_none_and_empty(obj):
"""Recursively removes None and empty dicts and lists values from a dictionary or list."""
diff --git a/flixopt/linear_converters.py b/flixopt/linear_converters.py
index 52c52463b..2ac60e70d 100644
--- a/flixopt/linear_converters.py
+++ b/flixopt/linear_converters.py
@@ -4,21 +4,24 @@
from __future__ import annotations
+import logging
import warnings
from typing import TYPE_CHECKING
import numpy as np
-from loguru import logger
from .components import LinearConverter
+from .config import DEPRECATION_REMOVAL_VERSION
from .core import TimeSeriesData
-from .structure import DEPRECATION_REMOVAL_VERSION, register_class_for_io
+from .structure import register_class_for_io
if TYPE_CHECKING:
from .elements import Flow
from .interface import OnOffParameters
from .types import Numeric_TPS
+logger = logging.getLogger('flixopt')
+
@register_class_for_io
class Boiler(LinearConverter):
@@ -1124,21 +1127,11 @@ def check_bounds(
if not np.all(value_arr > lower_bound):
logger.warning(
- "'{}.{}' <= lower bound {}. {}.min={}, shape={}",
- element_label,
- parameter_label,
- lower_bound,
- parameter_label,
- float(np.min(value_arr)),
- np.shape(value_arr),
+ f"'{element_label}.{parameter_label}' <= lower bound {lower_bound}. "
+ f'{parameter_label}.min={float(np.min(value_arr))}, shape={np.shape(value_arr)}'
)
if not np.all(value_arr < upper_bound):
logger.warning(
- "'{}.{}' >= upper bound {}. {}.max={}, shape={}",
- element_label,
- parameter_label,
- upper_bound,
- parameter_label,
- float(np.max(value_arr)),
- np.shape(value_arr),
+ f"'{element_label}.{parameter_label}' >= upper bound {upper_bound}. "
+ f'{parameter_label}.max={float(np.max(value_arr))}, shape={np.shape(value_arr)}'
)
diff --git a/flixopt/modeling.py b/flixopt/modeling.py
index ebe739a85..13b4c0e3e 100644
--- a/flixopt/modeling.py
+++ b/flixopt/modeling.py
@@ -1,11 +1,14 @@
+import logging
+
import linopy
import numpy as np
import xarray as xr
-from loguru import logger
from .config import CONFIG
from .structure import Submodel
+logger = logging.getLogger('flixopt')
+
class ModelingUtilitiesAbstract:
"""Utility functions for modeling calculations - leveraging xarray for temporal data"""
diff --git a/flixopt/network_app.py b/flixopt/network_app.py
index 446a2e7ce..d18bc44a8 100644
--- a/flixopt/network_app.py
+++ b/flixopt/network_app.py
@@ -1,11 +1,10 @@
from __future__ import annotations
+import logging
import socket
import threading
from typing import TYPE_CHECKING, Any
-from loguru import logger
-
try:
import dash_cytoscape as cyto
import dash_daq as daq
@@ -25,6 +24,8 @@
if TYPE_CHECKING:
from .flow_system import FlowSystem
+logger = logging.getLogger('flixopt')
+
# Configuration class for better organization
class VisualizationConfig:
@@ -779,7 +780,7 @@ def find_free_port(start_port=8050, end_port=8100):
server_thread = threading.Thread(target=server.serve_forever, daemon=True)
server_thread.start()
- print(f'Network visualization started on http://127.0.0.1:{port}/')
+ logger.success(f'Network visualization started on http://127.0.0.1:{port}/')
# Store server reference for cleanup
app.server_instance = server
diff --git a/flixopt/plotting.py b/flixopt/plotting.py
index 93f4dfc85..94959ecb5 100644
--- a/flixopt/plotting.py
+++ b/flixopt/plotting.py
@@ -26,6 +26,7 @@
from __future__ import annotations
import itertools
+import logging
import os
import pathlib
from typing import TYPE_CHECKING, Any, Literal
@@ -39,7 +40,6 @@
import plotly.graph_objects as go
import plotly.offline
import xarray as xr
-from loguru import logger
from .color_processing import process_colors
from .config import CONFIG
@@ -47,6 +47,8 @@
if TYPE_CHECKING:
import pyvis
+logger = logging.getLogger('flixopt')
+
# Define the colors for the 'portland' colorscale in matplotlib
_portland_colors = [
[12 / 255, 51 / 255, 131 / 255], # Dark blue
diff --git a/flixopt/results.py b/flixopt/results.py
index bc80ccf98..ccc36952f 100644
--- a/flixopt/results.py
+++ b/flixopt/results.py
@@ -2,6 +2,7 @@
import copy
import datetime
+import logging
import pathlib
import warnings
from typing import TYPE_CHECKING, Any, Literal
@@ -10,7 +11,6 @@
import numpy as np
import pandas as pd
import xarray as xr
-from loguru import logger
from . import io as fx_io
from . import plotting
@@ -27,6 +27,8 @@
from .calculation import Calculation, SegmentedCalculation
from .core import FlowSystemDimensions
+logger = logging.getLogger('flixopt')
+
def load_mapping_from_file(path: pathlib.Path) -> dict[str, str | list[str]]:
"""Load color mapping from JSON or YAML file.
@@ -342,18 +344,20 @@ def flow_system(self) -> FlowSystem:
Contains all input parameters."""
if self._flow_system is None:
# Temporarily disable all logging to suppress messages during restoration
- logger.disable('flixopt')
+ flixopt_logger = logging.getLogger('flixopt')
+ original_level = flixopt_logger.level
+ flixopt_logger.setLevel(logging.CRITICAL + 1) # Disable all logging
try:
self._flow_system = FlowSystem.from_dataset(self.flow_system_data)
self._flow_system._connect_network()
except Exception as e:
- logger.enable('flixopt') # Re-enable before logging critical message
+ flixopt_logger.setLevel(original_level) # Re-enable before logging
logger.critical(
f'Not able to restore FlowSystem from dataset. Some functionality is not availlable. {e}'
)
raise _FlowSystemRestorationError(f'Not able to restore FlowSystem from dataset. {e}') from e
finally:
- logger.enable('flixopt')
+ flixopt_logger.setLevel(original_level) # Restore original level
return self._flow_system
def setup_colors(
diff --git a/flixopt/solvers.py b/flixopt/solvers.py
index a9a3afb46..e5db61192 100644
--- a/flixopt/solvers.py
+++ b/flixopt/solvers.py
@@ -4,13 +4,14 @@
from __future__ import annotations
+import logging
from dataclasses import dataclass, field
from typing import Any, ClassVar
-from loguru import logger
-
from flixopt.config import CONFIG
+logger = logging.getLogger('flixopt')
+
@dataclass
class _Solver:
diff --git a/flixopt/structure.py b/flixopt/structure.py
index a6df1233a..62067e2ba 100644
--- a/flixopt/structure.py
+++ b/flixopt/structure.py
@@ -6,6 +6,7 @@
from __future__ import annotations
import inspect
+import logging
import re
from dataclasses import dataclass
from difflib import get_close_matches
@@ -21,10 +22,10 @@
import numpy as np
import pandas as pd
import xarray as xr
-from loguru import logger
from . import io as fx_io
-from .core import DEPRECATION_REMOVAL_VERSION, FlowSystemDimensions, TimeSeriesData, get_dataarray_stats
+from .config import DEPRECATION_REMOVAL_VERSION
+from .core import FlowSystemDimensions, TimeSeriesData, get_dataarray_stats
if TYPE_CHECKING: # for type checking and preventing circular imports
import pathlib
@@ -34,6 +35,7 @@
from .flow_system import FlowSystem
from .types import Effect_TPS, Numeric_TPS, NumericOrBool
+logger = logging.getLogger('flixopt')
CLASS_REGISTRY = {}
diff --git a/pyproject.toml b/pyproject.toml
index 7d9f36b35..d7510b1ce 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -40,7 +40,7 @@ dependencies = [
"netcdf4 >= 1.6.1, < 2",
# Utilities
"pyyaml >= 6.0.0, < 7",
- "loguru >= 0.7.0, < 1",
+ "colorlog >= 6.8.0, < 7",
"tqdm >= 4.66.0, < 5",
"tomli >= 2.0.1, < 3; python_version < '3.11'", # Only needed with python 3.10 or earlier
# Default solver
diff --git a/test_deprecations.py b/test_deprecations.py
index 6cd59b678..d530841b2 100644
--- a/test_deprecations.py
+++ b/test_deprecations.py
@@ -5,7 +5,7 @@
import pytest
import flixopt as fx
-from flixopt.core import DEPRECATION_REMOVAL_VERSION
+from flixopt.config import DEPRECATION_REMOVAL_VERSION
from flixopt.linear_converters import CHP, Boiler, HeatPump, HeatPumpWithSource, Power2Heat
diff --git a/tests/test_config.py b/tests/test_config.py
index 7de58e8aa..b09e0c5d9 100644
--- a/tests/test_config.py
+++ b/tests/test_config.py
@@ -1,15 +1,15 @@
"""Tests for the config module."""
+import logging
import sys
-from pathlib import Path
import pytest
-from loguru import logger
-from flixopt.config import _DEFAULTS, CONFIG, _setup_logging
+from flixopt.config import CONFIG, MultilineFormatter
+
+logger = logging.getLogger('flixopt')
-# All tests in this class will run in the same worker to prevent issues with global config altering
@pytest.mark.xdist_group(name='config_tests')
class TestConfigModule:
"""Test the CONFIG class and logging setup."""
@@ -19,632 +19,211 @@ def setup_method(self):
CONFIG.reset()
def teardown_method(self):
- """Clean up after each test to prevent state leakage."""
+ """Clean up after each test."""
CONFIG.reset()
def test_config_defaults(self):
"""Test that CONFIG has correct default values."""
- assert CONFIG.Logging.level == 'INFO'
- assert CONFIG.Logging.file is None
- assert CONFIG.Logging.console is False
assert CONFIG.Modeling.big == 10_000_000
assert CONFIG.Modeling.epsilon == 1e-5
- assert CONFIG.Modeling.big_binary_bound == 100_000
assert CONFIG.Solving.mip_gap == 0.01
assert CONFIG.Solving.time_limit_seconds == 300
- assert CONFIG.Solving.log_to_console is True
- assert CONFIG.Solving.log_main_results is True
assert CONFIG.config_name == 'flixopt'
- def test_module_initialization(self, capfd):
- """Test that logging is initialized on module import."""
- # Apply config to ensure handlers are initialized
- CONFIG.apply()
- # With default config (console=False, file=None), logs should not appear
- logger.info('test message')
+ def test_silent_by_default(self, capfd):
+ """Test that flixopt is silent by default."""
+ logger.info('should not appear')
captured = capfd.readouterr()
- assert 'test message' not in captured.out
- assert 'test message' not in captured.err
-
- def test_config_apply_console(self, capfd):
- """Test applying config with console logging enabled."""
- CONFIG.Logging.console = True
- CONFIG.Logging.level = 'DEBUG'
- CONFIG.apply()
-
- # Test that DEBUG level logs appear in console output
- test_message = 'test debug message 12345'
- logger.debug(test_message)
+ assert 'should not appear' not in captured.out
+
+ def test_enable_console_logging(self, capfd):
+ """Test enabling console logging."""
+ CONFIG.Logging.enable_console('INFO')
+ logger.info('test message')
captured = capfd.readouterr()
- assert test_message in captured.out or test_message in captured.err
+ assert 'test message' in captured.out
- def test_config_apply_file(self, tmp_path):
- """Test applying config with file logging enabled."""
+ def test_enable_file_logging(self, tmp_path):
+ """Test enabling file logging."""
log_file = tmp_path / 'test.log'
- CONFIG.Logging.file = str(log_file)
- CONFIG.Logging.level = 'WARNING'
- CONFIG.apply()
+ CONFIG.Logging.enable_file('INFO', str(log_file))
+ logger.info('test file message')
- # Test that WARNING level logs appear in the file
- test_message = 'test warning message 67890'
- logger.warning(test_message)
- # Loguru may buffer, so we need to ensure the log is written
- import time
-
- time.sleep(0.1) # Small delay to ensure write
assert log_file.exists()
- log_content = log_file.read_text()
- assert test_message in log_content
+ assert 'test file message' in log_file.read_text()
- def test_config_apply_console_stderr(self, capfd):
- """Test applying config with console logging to stderr."""
- CONFIG.Logging.console = 'stderr'
- CONFIG.Logging.level = 'INFO'
- CONFIG.apply()
+ def test_console_and_file_together(self, tmp_path, capfd):
+ """Test logging to both console and file."""
+ log_file = tmp_path / 'test.log'
+ CONFIG.Logging.enable_console('INFO')
+ CONFIG.Logging.enable_file('INFO', str(log_file))
- # Test that INFO logs appear in stderr
- test_message = 'test info to stderr 11111'
- logger.info(test_message)
- captured = capfd.readouterr()
- assert test_message in captured.err
+ logger.info('test both')
- def test_config_apply_multiple_changes(self, capfd):
- """Test applying multiple config changes at once."""
- CONFIG.Logging.console = True
- CONFIG.Logging.level = 'ERROR'
- CONFIG.apply()
+ # Check both outputs
+ assert 'test both' in capfd.readouterr().out
+ assert 'test both' in log_file.read_text()
- # Test that ERROR level logs appear but lower levels don't
- logger.warning('warning should not appear')
- logger.error('error should appear 22222')
- captured = capfd.readouterr()
- output = captured.out + captured.err
- assert 'warning should not appear' not in output
- assert 'error should appear 22222' in output
+ def test_disable_logging(self, capfd):
+ """Test disabling logging."""
+ CONFIG.Logging.enable_console('INFO')
+ CONFIG.Logging.disable()
- def test_config_to_dict(self):
- """Test converting CONFIG to dictionary."""
- CONFIG.Logging.level = 'DEBUG'
- CONFIG.Logging.console = True
-
- config_dict = CONFIG.to_dict()
-
- assert config_dict['config_name'] == 'flixopt'
- assert config_dict['logging']['level'] == 'DEBUG'
- assert config_dict['logging']['console'] is True
- assert config_dict['logging']['file'] is None
- assert 'modeling' in config_dict
- assert config_dict['modeling']['big'] == 10_000_000
- assert 'solving' in config_dict
- assert config_dict['solving']['mip_gap'] == 0.01
- assert config_dict['solving']['time_limit_seconds'] == 300
- assert config_dict['solving']['log_to_console'] is True
- assert config_dict['solving']['log_main_results'] is True
-
- def test_config_load_from_file(self, tmp_path):
- """Test loading configuration from YAML file."""
- config_file = tmp_path / 'config.yaml'
- config_content = """
-config_name: test_config
-logging:
- level: DEBUG
- console: true
- rich: false
-modeling:
- big: 20000000
- epsilon: 1e-6
-solving:
- mip_gap: 0.001
- time_limit_seconds: 600
- log_main_results: false
-"""
- config_file.write_text(config_content)
-
- CONFIG.load_from_file(config_file)
-
- assert CONFIG.config_name == 'test_config'
- assert CONFIG.Logging.level == 'DEBUG'
- assert CONFIG.Logging.console is True
- assert CONFIG.Modeling.big == 20000000
- # YAML may load epsilon as string, so convert for comparison
- assert float(CONFIG.Modeling.epsilon) == 1e-6
- assert CONFIG.Solving.mip_gap == 0.001
- assert CONFIG.Solving.time_limit_seconds == 600
- assert CONFIG.Solving.log_main_results is False
-
- def test_config_load_from_file_not_found(self):
- """Test that loading from non-existent file raises error."""
- with pytest.raises(FileNotFoundError):
- CONFIG.load_from_file('nonexistent_config.yaml')
-
- def test_config_load_from_file_partial(self, tmp_path):
- """Test loading partial configuration (should keep unspecified settings)."""
- config_file = tmp_path / 'partial_config.yaml'
- config_content = """
-logging:
- level: ERROR
-"""
- config_file.write_text(config_content)
-
- # Set a non-default value first
- CONFIG.Logging.console = True
- CONFIG.apply()
-
- CONFIG.load_from_file(config_file)
-
- # Should update level but keep other settings
- assert CONFIG.Logging.level == 'ERROR'
- # Verify console setting is preserved (not in YAML)
- assert CONFIG.Logging.console is True
-
- def test_setup_logging_silent_default(self, capfd):
- """Test that _setup_logging creates silent logger by default."""
- _setup_logging()
-
- # With default settings, logs should not appear
logger.info('should not appear')
- captured = capfd.readouterr()
- assert 'should not appear' not in captured.out
- assert 'should not appear' not in captured.err
-
- def test_setup_logging_with_console(self, capfd):
- """Test _setup_logging with console output."""
- _setup_logging(console=True, default_level='DEBUG')
-
- # Test that DEBUG logs appear in console
- test_message = 'debug console test 33333'
- logger.debug(test_message)
- captured = capfd.readouterr()
- assert test_message in captured.out or test_message in captured.err
+ assert 'should not appear' not in capfd.readouterr().out
+
+ def test_custom_success_level(self, capfd):
+ """Test custom SUCCESS log level."""
+ CONFIG.Logging.enable_console('INFO')
+ logger.success('success message')
+ assert 'success message' in capfd.readouterr().out
+
+ def test_multiline_formatting(self):
+ """Test that multi-line messages get box borders."""
+ formatter = MultilineFormatter()
+ record = logging.LogRecord('test', logging.INFO, '', 1, 'Line 1\nLine 2\nLine 3', (), None)
+ formatted = formatter.format(record)
+ assert '┌─' in formatted
+ assert '└─' in formatted
+
+ def test_console_stderr(self, capfd):
+ """Test logging to stderr."""
+ CONFIG.Logging.enable_console('INFO', stream=sys.stderr)
+ logger.info('stderr test')
+ assert 'stderr test' in capfd.readouterr().err
+
+ def test_non_colored_output(self, capfd):
+ """Test non-colored console output."""
+ CONFIG.Logging.enable_console('INFO', colored=False)
+ logger.info('plain text')
+ assert 'plain text' in capfd.readouterr().out
+
+ def test_preset_exploring(self, capfd):
+ """Test exploring preset."""
+ CONFIG.exploring()
+ logger.info('exploring')
+ assert 'exploring' in capfd.readouterr().out
+ assert CONFIG.Solving.log_to_console is True
- def test_setup_logging_clears_handlers(self, capfd):
- """Test that _setup_logging clears existing handlers."""
- # Setup a handler first
- _setup_logging(console=True)
+ def test_preset_debug(self, capfd):
+ """Test debug preset."""
+ CONFIG.debug()
+ logger.debug('debug')
+ assert 'debug' in capfd.readouterr().out
- # Call setup again with different settings - should clear and re-add
- _setup_logging(console=True, default_level='ERROR')
+ def test_preset_production(self, tmp_path):
+ """Test production preset."""
+ log_file = tmp_path / 'prod.log'
+ CONFIG.production(str(log_file))
+ logger.info('production')
- # Verify new settings work: ERROR logs appear but INFO doesn't
- logger.info('info should not appear')
- logger.error('error should appear 44444')
- captured = capfd.readouterr()
- output = captured.out + captured.err
- assert 'info should not appear' not in output
- assert 'error should appear 44444' in output
+ assert log_file.exists()
+ assert 'production' in log_file.read_text()
+ assert CONFIG.Plotting.default_show is False
- def test_change_logging_level_removed(self):
- """Test that change_logging_level function is deprecated but still exists."""
- # This function is deprecated - users should use CONFIG.apply() instead
- import flixopt
+ def test_preset_silent(self, capfd):
+ """Test silent preset."""
+ CONFIG.silent()
+ logger.info('should not appear')
+ assert 'should not appear' not in capfd.readouterr().out
- # Function should still exist but be deprecated
- assert hasattr(flixopt, 'change_logging_level')
+ def test_config_reset(self):
+ """Test that reset() restores defaults and disables logging."""
+ CONFIG.Modeling.big = 99999999
+ CONFIG.Logging.enable_console('DEBUG')
- # Should emit deprecation warning when called
- with pytest.warns(DeprecationWarning, match='change_logging_level is deprecated'):
- flixopt.change_logging_level('DEBUG')
-
- def test_public_api(self):
- """Test that CONFIG and change_logging_level are exported from config module."""
- from flixopt import config
-
- # CONFIG should be accessible
- assert hasattr(config, 'CONFIG')
-
- # change_logging_level should be accessible (but deprecated)
- assert hasattr(config, 'change_logging_level')
-
- # _setup_logging should exist but be marked as private
- assert hasattr(config, '_setup_logging')
-
- # merge_configs should not exist (was removed)
- assert not hasattr(config, 'merge_configs')
-
- def test_logging_levels(self, capfd):
- """Test all valid logging levels."""
- levels = ['DEBUG', 'INFO', 'SUCCESS', 'WARNING', 'ERROR', 'CRITICAL']
-
- for level in levels:
- CONFIG.Logging.level = level
- CONFIG.Logging.console = True
- CONFIG.apply()
-
- # Test that logs at the configured level appear
- test_message = f'test message at {level} 55555'
- getattr(logger, level.lower())(test_message)
- captured = capfd.readouterr()
- output = captured.out + captured.err
- assert test_message in output, f'Expected {level} message to appear'
-
- def test_file_handler_rotation(self, tmp_path):
- """Test that file handler rotation configuration is accepted."""
- log_file = tmp_path / 'rotating.log'
- CONFIG.Logging.file = str(log_file)
- CONFIG.Logging.max_file_size = 1024
- CONFIG.Logging.backup_count = 2
- CONFIG.apply()
-
- # Write some logs
- for i in range(10):
- logger.info(f'Log message {i}')
-
- # Verify file logging works
- import time
-
- time.sleep(0.1)
- assert log_file.exists(), 'Log file should be created'
-
- # Verify configuration values are preserved
- assert CONFIG.Logging.max_file_size == 1024
- assert CONFIG.Logging.backup_count == 2
-
- def test_custom_config_yaml_complete(self, tmp_path):
- """Test loading a complete custom configuration."""
- config_file = tmp_path / 'custom_config.yaml'
- config_content = """
-config_name: my_custom_config
-logging:
- level: CRITICAL
- console: true
- file: /tmp/custom.log
-modeling:
- big: 50000000
- epsilon: 1e-4
- big_binary_bound: 200000
-solving:
- mip_gap: 0.005
- time_limit_seconds: 900
- log_main_results: false
-"""
- config_file.write_text(config_content)
-
- CONFIG.load_from_file(config_file)
-
- # Check all settings were applied
- assert CONFIG.config_name == 'my_custom_config'
- assert CONFIG.Logging.level == 'CRITICAL'
- assert CONFIG.Logging.console is True
- assert CONFIG.Logging.file == '/tmp/custom.log'
- assert CONFIG.Modeling.big == 50000000
- assert float(CONFIG.Modeling.epsilon) == 1e-4
- assert CONFIG.Modeling.big_binary_bound == 200000
- assert CONFIG.Solving.mip_gap == 0.005
- assert CONFIG.Solving.time_limit_seconds == 900
- assert CONFIG.Solving.log_main_results is False
-
- # Verify logging was applied to both console and file
- import time
-
- test_message = 'critical test message 66666'
- logger.critical(test_message)
- time.sleep(0.1) # Small delay to ensure write
- # Check file exists and contains message
- log_file_path = tmp_path / 'custom.log'
- if not log_file_path.exists():
- # File might be at /tmp/custom.log as specified in config
- import os
-
- log_file_path = os.path.expanduser('/tmp/custom.log')
- # We can't reliably test the file at /tmp/custom.log in tests
- # So just verify critical level messages would appear at this level
- assert CONFIG.Logging.level == 'CRITICAL'
-
- def test_config_file_with_console_and_file(self, tmp_path):
- """Test configuration with both console and file logging enabled."""
- log_file = tmp_path / 'test.log'
- config_file = tmp_path / 'config.yaml'
- config_content = f"""
-logging:
- level: INFO
- console: true
- file: {log_file}
-"""
- config_file.write_text(config_content)
-
- CONFIG.load_from_file(config_file)
-
- # Verify logging to both console and file works
- import time
-
- test_message = 'info test both outputs 77777'
- logger.info(test_message)
- time.sleep(0.1) # Small delay to ensure write
- # Verify file logging works
- assert log_file.exists()
- log_content = log_file.read_text()
- assert test_message in log_content
+ CONFIG.reset()
- def test_config_to_dict_roundtrip(self, tmp_path):
- """Test that config can be saved to dict, modified, and restored."""
- # Set custom values
- CONFIG.Logging.level = 'WARNING'
- CONFIG.Logging.console = True
- CONFIG.Modeling.big = 99999999
+ assert CONFIG.Modeling.big == 10_000_000
+ assert len(logger.handlers) == 0
- # Save to dict
+ def test_config_to_dict(self):
+ """Test converting CONFIG to dictionary."""
config_dict = CONFIG.to_dict()
+ assert config_dict['modeling']['big'] == 10_000_000
+ assert config_dict['solving']['mip_gap'] == 0.01
- # Verify dict structure
- assert config_dict['logging']['level'] == 'WARNING'
- assert config_dict['logging']['console'] is True
- assert config_dict['modeling']['big'] == 99999999
-
- # Could be written to YAML and loaded back
- yaml_file = tmp_path / 'saved_config.yaml'
- import yaml
-
- with open(yaml_file, 'w') as f:
- yaml.dump(config_dict, f)
-
- # Reset config
- CONFIG.Logging.level = 'INFO'
- CONFIG.Logging.console = False
- CONFIG.Modeling.big = 10_000_000
-
- # Load back from file
- CONFIG.load_from_file(yaml_file)
-
- # Should match original values
- assert CONFIG.Logging.level == 'WARNING'
- assert CONFIG.Logging.console is True
- assert CONFIG.Modeling.big == 99999999
-
- def test_config_file_with_only_modeling(self, tmp_path):
- """Test config file that only sets modeling parameters."""
- config_file = tmp_path / 'modeling_only.yaml'
- config_content = """
-modeling:
- big: 999999
- epsilon: 0.001
-"""
- config_file.write_text(config_content)
-
- # Set logging config before loading
- original_level = CONFIG.Logging.level
- CONFIG.load_from_file(config_file)
-
- # Modeling should be updated
- assert CONFIG.Modeling.big == 999999
- assert float(CONFIG.Modeling.epsilon) == 0.001
-
- # Logging should keep default/previous values
- assert CONFIG.Logging.level == original_level
-
- def test_config_attribute_modification(self):
- """Test that config attributes can be modified directly."""
- # Store original values
- original_big = CONFIG.Modeling.big
- original_level = CONFIG.Logging.level
-
- # Modify attributes
+ def test_attribute_modification(self):
+ """Test modifying config attributes."""
CONFIG.Modeling.big = 12345678
- CONFIG.Modeling.epsilon = 1e-8
- CONFIG.Logging.level = 'DEBUG'
- CONFIG.Logging.console = True
+ CONFIG.Solving.mip_gap = 0.001
- # Verify modifications
assert CONFIG.Modeling.big == 12345678
- assert CONFIG.Modeling.epsilon == 1e-8
- assert CONFIG.Logging.level == 'DEBUG'
- assert CONFIG.Logging.console is True
-
- # Reset
- CONFIG.Modeling.big = original_big
- CONFIG.Logging.level = original_level
- CONFIG.Logging.console = False
-
- def test_logger_actually_logs(self, tmp_path):
- """Test that the logger actually writes log messages."""
- log_file = tmp_path / 'actual_test.log'
- CONFIG.Logging.file = str(log_file)
- CONFIG.Logging.level = 'DEBUG'
- CONFIG.apply()
-
- test_message = 'Test log message from config test'
- logger.debug(test_message)
-
- # Check that file was created and contains the message
- assert log_file.exists()
- log_content = log_file.read_text()
- assert test_message in log_content
+ assert CONFIG.Solving.mip_gap == 0.001
- def test_modeling_config_persistence(self):
- """Test that Modeling config is independent of Logging config."""
- # Set custom modeling values
- CONFIG.Modeling.big = 99999999
- CONFIG.Modeling.epsilon = 1e-8
+ def test_change_logging_level_deprecated(self):
+ """Test deprecated change_logging_level function."""
+ from flixopt import change_logging_level
- # Change and apply logging config
- CONFIG.Logging.console = True
- CONFIG.apply()
+ with pytest.warns(DeprecationWarning, match='change_logging_level is deprecated'):
+ change_logging_level('INFO')
- # Modeling values should be unchanged
- assert CONFIG.Modeling.big == 99999999
- assert CONFIG.Modeling.epsilon == 1e-8
+ def test_exception_logging(self, capfd):
+ """Test that exceptions are properly logged with tracebacks."""
+ CONFIG.Logging.enable_console('INFO')
- def test_config_reset(self):
- """Test that CONFIG.reset() restores all defaults."""
- # Modify all config values
- CONFIG.Logging.level = 'DEBUG'
- CONFIG.Logging.console = True
- CONFIG.Logging.file = '/tmp/test.log'
- CONFIG.Modeling.big = 99999999
- CONFIG.Modeling.epsilon = 1e-8
- CONFIG.Modeling.big_binary_bound = 500000
- CONFIG.Solving.mip_gap = 0.0001
- CONFIG.Solving.time_limit_seconds = 1800
- CONFIG.Solving.log_to_console = False
- CONFIG.Solving.log_main_results = False
- CONFIG.config_name = 'test_config'
-
- # Reset should restore all defaults
- CONFIG.reset()
+ try:
+ raise ValueError('Test exception')
+ except ValueError:
+ logger.exception('An error occurred')
- # Verify all values are back to defaults
- assert CONFIG.Logging.level == 'INFO'
- assert CONFIG.Logging.console is False
- assert CONFIG.Logging.file is None
- assert CONFIG.Modeling.big == 10_000_000
- assert CONFIG.Modeling.epsilon == 1e-5
- assert CONFIG.Modeling.big_binary_bound == 100_000
- assert CONFIG.Solving.mip_gap == 0.01
- assert CONFIG.Solving.time_limit_seconds == 300
- assert CONFIG.Solving.log_to_console is True
- assert CONFIG.Solving.log_main_results is True
- assert CONFIG.config_name == 'flixopt'
+ captured = capfd.readouterr().out
+ assert 'An error occurred' in captured
+ assert 'ValueError' in captured
+ assert 'Test exception' in captured
+ assert 'Traceback' in captured
- # Verify logging was also reset (default is no logging to console/file)
- # Test that logs don't appear with default config
- from io import StringIO
+ def test_exception_logging_non_colored(self, capfd):
+ """Test that exceptions are properly logged with tracebacks in non-colored mode."""
+ CONFIG.Logging.enable_console('INFO', colored=False)
- old_stdout = sys.stdout
- old_stderr = sys.stderr
- sys.stdout = StringIO()
- sys.stderr = StringIO()
try:
- logger.info('should not appear after reset')
- stdout_content = sys.stdout.getvalue()
- stderr_content = sys.stderr.getvalue()
- assert 'should not appear after reset' not in stdout_content
- assert 'should not appear after reset' not in stderr_content
- finally:
- sys.stdout = old_stdout
- sys.stderr = old_stderr
-
- def test_reset_matches_class_defaults(self):
- """Test that reset() values match the _DEFAULTS constants.
-
- This ensures the reset() method and class attribute defaults
- stay synchronized by using the same source of truth (_DEFAULTS).
- """
- # Modify all values to something different
- CONFIG.Logging.level = 'CRITICAL'
- CONFIG.Logging.file = '/tmp/test.log'
- CONFIG.Logging.console = True
- CONFIG.Modeling.big = 999999
- CONFIG.Modeling.epsilon = 1e-10
- CONFIG.Modeling.big_binary_bound = 999999
- CONFIG.Solving.mip_gap = 0.0001
- CONFIG.Solving.time_limit_seconds = 9999
- CONFIG.Solving.log_to_console = False
- CONFIG.Solving.log_main_results = False
- CONFIG.config_name = 'modified'
-
- # Verify values are actually different from defaults
- assert CONFIG.Logging.level != _DEFAULTS['logging']['level']
- assert CONFIG.Modeling.big != _DEFAULTS['modeling']['big']
- assert CONFIG.Solving.mip_gap != _DEFAULTS['solving']['mip_gap']
- assert CONFIG.Solving.log_to_console != _DEFAULTS['solving']['log_to_console']
-
- # Now reset
- CONFIG.reset()
+ raise ValueError('Test exception non-colored')
+ except ValueError:
+ logger.exception('An error occurred')
+
+ captured = capfd.readouterr().out
+ assert 'An error occurred' in captured
+ assert 'ValueError: Test exception non-colored' in captured
+ assert 'Traceback' in captured
+
+ def test_enable_file_preserves_custom_handlers(self, tmp_path, capfd):
+ """Test that enable_file preserves custom non-file handlers."""
+ # Add a custom console handler first
+ CONFIG.Logging.enable_console('INFO')
+ logger.info('console test')
+ assert 'console test' in capfd.readouterr().out
+
+ # Now add file logging - should keep the console handler
+ log_file = tmp_path / 'test.log'
+ CONFIG.Logging.enable_file('INFO', str(log_file))
- # Verify reset() restored exactly the _DEFAULTS values
- assert CONFIG.Logging.level == _DEFAULTS['logging']['level']
- assert CONFIG.Logging.file == _DEFAULTS['logging']['file']
- assert CONFIG.Logging.console == _DEFAULTS['logging']['console']
- assert CONFIG.Modeling.big == _DEFAULTS['modeling']['big']
- assert CONFIG.Modeling.epsilon == _DEFAULTS['modeling']['epsilon']
- assert CONFIG.Modeling.big_binary_bound == _DEFAULTS['modeling']['big_binary_bound']
- assert CONFIG.Solving.mip_gap == _DEFAULTS['solving']['mip_gap']
- assert CONFIG.Solving.time_limit_seconds == _DEFAULTS['solving']['time_limit_seconds']
- assert CONFIG.Solving.log_to_console == _DEFAULTS['solving']['log_to_console']
- assert CONFIG.Solving.log_main_results == _DEFAULTS['solving']['log_main_results']
- assert CONFIG.config_name == _DEFAULTS['config_name']
-
- def test_solving_config_defaults(self):
- """Test that CONFIG.Solving has correct default values."""
- assert CONFIG.Solving.mip_gap == 0.01
- assert CONFIG.Solving.time_limit_seconds == 300
- assert CONFIG.Solving.log_to_console is True
- assert CONFIG.Solving.log_main_results is True
-
- def test_solving_config_modification(self):
- """Test that CONFIG.Solving attributes can be modified."""
- # Modify solving config
- CONFIG.Solving.mip_gap = 0.005
- CONFIG.Solving.time_limit_seconds = 600
- CONFIG.Solving.log_main_results = False
- CONFIG.apply()
-
- # Verify modifications
- assert CONFIG.Solving.mip_gap == 0.005
- assert CONFIG.Solving.time_limit_seconds == 600
- assert CONFIG.Solving.log_main_results is False
-
- def test_solving_config_integration_with_solvers(self):
- """Test that solvers use CONFIG.Solving defaults."""
- from flixopt import solvers
-
- # Test with default config
- CONFIG.reset()
- solver1 = solvers.HighsSolver()
- assert solver1.mip_gap == CONFIG.Solving.mip_gap
- assert solver1.time_limit_seconds == CONFIG.Solving.time_limit_seconds
-
- # Modify config and create new solver
- CONFIG.Solving.mip_gap = 0.002
- CONFIG.Solving.time_limit_seconds = 900
- CONFIG.apply()
-
- solver2 = solvers.GurobiSolver()
- assert solver2.mip_gap == 0.002
- assert solver2.time_limit_seconds == 900
-
- # Explicit values should override config
- solver3 = solvers.HighsSolver(mip_gap=0.1, time_limit_seconds=60)
- assert solver3.mip_gap == 0.1
- assert solver3.time_limit_seconds == 60
-
- def test_solving_config_yaml_loading(self, tmp_path):
- """Test loading solving config from YAML file."""
- config_file = tmp_path / 'solving_config.yaml'
- config_content = """
-solving:
- mip_gap: 0.0001
- time_limit_seconds: 1200
- log_main_results: false
-"""
- config_file.write_text(config_content)
-
- CONFIG.load_from_file(config_file)
-
- assert CONFIG.Solving.mip_gap == 0.0001
- assert CONFIG.Solving.time_limit_seconds == 1200
- assert CONFIG.Solving.log_main_results is False
-
- def test_solving_config_in_to_dict(self):
- """Test that CONFIG.Solving is included in to_dict()."""
- CONFIG.Solving.mip_gap = 0.003
- CONFIG.Solving.time_limit_seconds = 450
- CONFIG.Solving.log_main_results = False
+ logger.info('both outputs')
- config_dict = CONFIG.to_dict()
+ # Check console still works
+ console_output = capfd.readouterr().out
+ assert 'both outputs' in console_output
+
+ # Check file was created and has the message
+ assert log_file.exists()
+ assert 'both outputs' in log_file.read_text()
- assert 'solving' in config_dict
- assert config_dict['solving']['mip_gap'] == 0.003
- assert config_dict['solving']['time_limit_seconds'] == 450
- assert config_dict['solving']['log_main_results'] is False
+ def test_enable_file_removes_duplicate_file_handlers(self, tmp_path):
+ """Test that enable_file removes existing file handlers to avoid duplicates."""
+ log_file = tmp_path / 'test.log'
- def test_solving_config_persistence(self):
- """Test that Solving config is independent of other configs."""
- # Set custom solving values
- CONFIG.Solving.mip_gap = 0.007
- CONFIG.Solving.time_limit_seconds = 750
+ # Enable file logging twice
+ CONFIG.Logging.enable_file('INFO', str(log_file))
+ CONFIG.Logging.enable_file('INFO', str(log_file))
- # Change and apply logging config
- CONFIG.Logging.console = True
- CONFIG.apply()
+ logger.info('duplicate test')
- # Solving values should be unchanged
- assert CONFIG.Solving.mip_gap == 0.007
- assert CONFIG.Solving.time_limit_seconds == 750
+ # Count file handlers - should only be 1
+ from logging.handlers import RotatingFileHandler
- # Change modeling config
- CONFIG.Modeling.big = 99999999
- CONFIG.apply()
+ file_handlers = [h for h in logger.handlers if isinstance(h, (logging.FileHandler, RotatingFileHandler))]
+ assert len(file_handlers) == 1
- # Solving values should still be unchanged
- assert CONFIG.Solving.mip_gap == 0.007
- assert CONFIG.Solving.time_limit_seconds == 750
+ # Message should appear only once in the file
+ log_content = log_file.read_text()
+ assert log_content.count('duplicate test') == 1