Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
40 commits
Select commit Hold shift + click to select a range
55c63bc
fix: right-align numeric cells, color name by ROI pen, add color pick…
Osayi-ANL Mar 13, 2026
a6b06ba
change: roi stats to right aligned
Osayi-ANL Mar 16, 2026
c3e8261
Merge pull request #59 from AdvancedPhotonSource/dev-osayi
Osayi-ANL Mar 16, 2026
2bfd75d
add: motor pos dropdown
Osayi-ANL Mar 16, 2026
0ad8bfb
add: single-frame axis projection mode to ROI plot dock
Osayi-ANL Mar 18, 2026
4f11eae
change: index for more modern look
Osayi-ANL Mar 18, 2026
8d46b72
Update February
Osayi-ANL Mar 18, 2026
29b131a
Merge branch 'wb-roi-sum-axis' into dev-osayi
Osayi-ANL Mar 18, 2026
30d851c
Merge pull request #61 from AdvancedPhotonSource/dev-osayi
Osayi-ANL Mar 18, 2026
5385975
change: pva_setup to workflow.py/.ui
Osayi-ANL Mar 18, 2026
1b90db5
fix: schema migration for missing profile columns, first-profile tree…
Osayi-ANL Mar 18, 2026
6117767
fix: hpc_rsm_consumer reads HKL config from settings.py instead of TO…
Osayi-ANL Mar 18, 2026
394243b
Merge pull request #62 from AdvancedPhotonSource/dev-osayi
Osayi-ANL Mar 20, 2026
ea291ed
add: log viewer in launcher to watch the logs coming through
Osayi-ANL Mar 20, 2026
413eb44
fix: config source radio selection to inline
Osayi-ANL Mar 20, 2026
4dd9d8c
add: version number for DashPVA
Osayi-ANL Mar 20, 2026
322e223
Database layer
Osayi-ANL Mar 23, 2026
defc419
add: processor file inputs as editable dropdowns populated from CONSU…
Osayi-ANL Mar 23, 2026
ea609c0
Merge pull request #63 from AdvancedPhotonSource/dev-osayi
Osayi-ANL Mar 23, 2026
5e04567
fix: consumer hpc seeding path
Osayi-ANL Mar 25, 2026
5f56025
fix: static records refresh in sim server
Osayi-ANL Mar 25, 2026
b65a4d8
fix: db init, seed path, detached ORM, and HPC consumer config guard
Osayi-ANL Mar 25, 2026
2227bd4
Merge pull request #64 from AdvancedPhotonSource/dev-osayi
Osayi-ANL Mar 25, 2026
109be03
fix: seeding for consumers
Osayi-ANL Mar 27, 2026
28074a7
Merge branch 'dev-test' into dev
Osayi-ANL Mar 27, 2026
98d7062
add: output override for where you want to save
Osayi-ANL Mar 27, 2026
ef7803e
change: launcher to have its own UI
Osayi-ANL Apr 10, 2026
68ba58d
fix: setup ioc path to point to right path
Osayi-ANL Apr 10, 2026
9e21a42
fix: 3d rendering issue
Osayi-ANL Apr 10, 2026
bb959cf
fix: 3d rendering issue, had to add the rsm_converter file
Osayi-ANL Apr 10, 2026
1535aef
fix: pva_reader to update the output
Osayi-ANL Apr 10, 2026
d9889c9
remove: config_filepath from PVAReader
Osayi-ANL Apr 13, 2026
261957c
remove: pv config path
Osayi-ANL Apr 13, 2026
e8977e8
remove: all other config path related variables
Osayi-ANL Apr 13, 2026
3da5b64
add: LogMixin
Osayi-ANL Apr 13, 2026
eaa2c10
remove: missed frames
Osayi-ANL Apr 13, 2026
2e32bce
fix: 3d plane duplicating
Osayi-ANL Apr 13, 2026
3aaaf9c
change: roi_plot_dock to use the entry/data/metadata/ca for the dropdown
Osayi-ANL Apr 15, 2026
ce6e2f9
add: position reading from axis
Osayi-ANL Apr 15, 2026
01c20d0
fix: PR #65 critical issues — settings, consumers, workspace_3d, seed…
pecomyint Apr 17, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 9 additions & 2 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,13 @@ This file tracks the latest changes, features, and improvements in DashPVA.
### New Features
- Scan Monitor gives you an option to save or write temp
- Universal log system, all outputs go to logs/general.log
- ROI Plot dock: added Single Frame mode with axis projection (Proj X / Proj Y) to sum ROI counts along a chosen axis for the current frame
- Database to replace Toml configuration for PV's

### Fix
### Fix
- Profile import issue


## Latest Changes (February 2026)
### Added
- ROI calculated for math between specific ROI's
Expand All @@ -21,7 +24,11 @@ This file tracks the latest changes, features, and improvements in DashPVA.
- Conda supports hdf5plugin

### Change
- All placeholder variables raise NotImplementedError
- All placeholder methods raise NotImplementedError

### Fixed
- Vmin/Vmax now syncs with histogram in 2d workbench


## Latest Changes (January 2026)
- Workbench now supports loading compressed datasets for smoother analysis and smaller storage footprints. For legacy files, a converter will be provided to update the file structure so compression loads seamlessly.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -585,11 +585,15 @@ def setupMetadataPvs(self, metadataPv):
if self.caMetadataPvs:
if not os.environ.get('EPICS_DB_INCLUDE_PATH'):
pvDataLib = ctypes.util.find_library('pvData')
if not pvDataLib:
raise Exception('Cannot find dbd directory, please set EPICS_DB_INCLUDE_PATH environment variable to use CA metadata PVs.')
pvDataLib = os.path.realpath(pvDataLib)
epicsLibDir = os.path.dirname(pvDataLib)
dbdDir = os.path.realpath(f'{epicsLibDir}/../../dbd')
if pvDataLib:
pvDataLib = os.path.realpath(pvDataLib)
epicsLibDir = os.path.dirname(pvDataLib)
dbdDir = os.path.realpath(f'{epicsLibDir}/../../dbd')
else:
# Fallback: use dbd directory bundled with the pvaccess Python package
dbdDir = os.path.join(os.path.dirname(pva.__file__), 'dbd')
if not os.path.isdir(dbdDir):
raise Exception('Cannot find dbd directory, please set EPICS_DB_INCLUDE_PATH environment variable to use CA metadata PVs.')
os.environ['EPICS_DB_INCLUDE_PATH'] = dbdDir

print(f'CA Metadata PVs: {self.caMetadataPvs}')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -172,11 +172,15 @@ def setup_ca_ioc(records_dict) -> pva.CaIoc:
"""
if not os.environ.get('EPICS_DB_INCLUDE_PATH'):
pvDataLib = ctypes.util.find_library('pvData')
if not pvDataLib:
raise Exception('Cannot find dbd directory. Please set EPICS_DB_INCLUDE_PATH.')
pvDataLib = os.path.realpath(pvDataLib)
epicsLibDir = os.path.dirname(pvDataLib)
dbdDir = os.path.realpath('%s/../../dbd' % epicsLibDir)
if pvDataLib:
pvDataLib = os.path.realpath(pvDataLib)
epicsLibDir = os.path.dirname(pvDataLib)
dbdDir = os.path.realpath('%s/../../dbd' % epicsLibDir)
else:
# Fallback: use dbd directory bundled with the pvaccess Python package
dbdDir = os.path.join(os.path.dirname(pva.__file__), 'dbd')
if not os.path.isdir(dbdDir):
raise Exception('Cannot find dbd directory. Please set EPICS_DB_INCLUDE_PATH.')
os.environ['EPICS_DB_INCLUDE_PATH'] = dbdDir

# Create a temporary database file
Expand Down Expand Up @@ -261,6 +265,16 @@ def main() -> None:
for rec_name, rec_data in all_records.items():
update_full_record(caIoc, rec_name, rec_data)

# Static records that must be re-put each loop to keep their CA timestamps current
static_records = {
"PrimaryBeamDirection": primary_beam_direction_record,
"InplaneReferenceDirection": inplane_reference_direction_record,
"SampleSurfaceNormalDirection": sample_surface_normal_direction_record,
"6idb:spec:UB_matrix": ub_matrix_record,
"DetectorSetup": detector_setup_record,
"ScanOn": scan_on_record,
}

# For dynamic axis records, store their base positions
base_positions = {name: rec['Position'] for name, rec in axis_records.items()}
dynamic_records = {**axis_records, '6idb:spec:Energy':13.0,} #'6idb:spec:UB_matrix': caget('6idb:spec:UB_matrix')}
Expand All @@ -272,6 +286,11 @@ def main() -> None:
try:
while True:
elapsed = time.time() - start_time

# Re-put static records every loop so their timestamps stay current
for rec_name, rec_data in static_records.items():
update_full_record(caIoc, rec_name, rec_data)

for name, rec in dynamic_records.items():
# Update the Position field with a sine offset
new_position = 5 + (amplitude * math.sin(elapsed)) # caget(name) #+ amplitude * math.sin(elapsed)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,7 @@ def __init__(self, configDict={}):
pass

# Config Variables
self.path = None
self.hkl_config = None
self.hkl_config = {}

# Statistics
self.nFramesProcessed = 0
Expand Down Expand Up @@ -129,18 +128,29 @@ def __init__(self, configDict={}):
self.configure(configDict)

def configure(self, configDict):
"""Configure processor settings and initialize HKL parameters"""
"""Configure processor settings and initialize HKL parameters from TOML config."""
self.logger.debug(f'Configuration update: {configDict}')

if 'path' in configDict:
self.path = configDict["path"]
with open(self.path, "r") as config_file:
self.config = toml.load(config_file)

if 'HKL' in self.config:
self.hkl_config : dict = self.config['HKL']
for section in self.hkl_config.values(): # every section holds a dict
for channel in section.values(): # the values of each seciton is the pv name string
self.path = configDict['path']
else:
import settings as _settings
self.path = _settings.TOML_FILE
if self.path is None:
raise RuntimeError(
"HpcRsmProcessor: no 'path' in configDict and "
"settings.TOML_FILE is not set — configure a TOML config first."
)

with open(self.path, 'r') as f:
self.config = toml.load(f)

self.hkl_config = self.config.get('HKL', {})
self.hkl_pv_channels = set()
for section in self.hkl_config.values():
if isinstance(section, dict):
for channel in section.values():
if channel:
self.hkl_pv_channels.add(channel)

def parse_hkl_ndattributes(self, pva_object):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,10 +45,17 @@ def configure(self, configDict):
"""
if 'path' in configDict:
self.path = configDict['path']
with open(self.path, 'r') as f:
self.config: dict = toml.load(f)
else:
self.path = None
import settings as _settings
self.path = _settings.TOML_FILE
if self.path is None:
raise RuntimeError(
"HpcAnalysisProcessor: no 'path' in configDict and "
"settings.TOML_FILE is not set — configure a TOML config first."
)

with open(self.path, 'r') as f:
self.config: dict = toml.load(f)

self.axis1 = self.config.get('ANALYSIS', {}).get('AXIS1', None)
self.axis2 = self.config.get('ANALYSIS', {}).get('AXIS2', None)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -121,13 +121,24 @@ def configure(self, configDict):
# COPIED FROM hpc_rsm_consumer.py - HKL configuration setup
if 'path' in configDict:
self.path = configDict["path"]
with open(self.path, "r") as config_file:
self.config = toml.load(config_file)

if 'HKL' in self.config:
self.hkl_config : dict = self.config['HKL']
for section in self.hkl_config.values(): # every section holds a dict
for channel in section.values(): # the values of each seciton is the pv name string
else:
import settings as _settings
self.path = _settings.TOML_FILE
if self.path is None:
raise RuntimeError(
"HpcAdMetadataProcessor: no 'path' in configDict and "
"settings.TOML_FILE is not set — configure a TOML config first."
)

with open(self.path, "r") as config_file:
self.config = toml.load(config_file)

self.hkl_config = self.config.get('HKL', {})
self.hkl_pv_channels = set()
for section in self.hkl_config.values():
if isinstance(section, dict):
for channel in section.values():
if channel:
self.hkl_pv_channels.add(channel)

# Log configuration via central logger instead of writing to a file
Expand Down
9 changes: 3 additions & 6 deletions dashpva.py
Original file line number Diff line number Diff line change
Expand Up @@ -90,11 +90,11 @@ def setup(ioc):
"""Sets up the PVA workflow or the simulator."""
if ioc:
click.echo('Running simulator setup...')
subprocess.Popen([sys.executable, 'consumers/sim_rsm_data.py'], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
subprocess.Popen([sys.executable, 'consumers/caIOC_servers/sim_rsm_data.py'], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
return

click.echo('Running standard PVA setup...')
exit_code = subprocess.run([sys.executable, 'pva_setup/pva_workflow_setup_dialog.py']).returncode
exit_code = subprocess.run([sys.executable, 'workflow/workflow.py']).returncode
sys.exit(exit_code)

@cli.command()
Expand All @@ -115,16 +115,13 @@ def workbench():
@cli.command()
@click.argument('name', type=click.Choice(['scan', 'scan-monitors']))
@click.option('--channel', default='', help='PVA channel (optional).')
@click.option('--config', 'config_path', default='', help='Path to TOML config file (optional).')
def monitor(name, channel, config_path):
def monitor(name, channel):
"""Open a specific monitor by name. Supported: scan (alias: scan-monitors)."""
click.echo(f'Opening monitor: {name}')
if name in ('scan', 'scan-monitors'):
command = [sys.executable, 'viewer/scan_view.py']
else:
raise click.BadParameter(f'Unknown view name: {name}')
if config_path:
command.extend(['--config', config_path])
if channel:
command.extend(['--channel', channel])
exit_code = subprocess.run(command).returncode
Expand Down
6 changes: 3 additions & 3 deletions database/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
"""
Database package.

DatabaseInterface is not yet available — profile models and managers
have not been implemented. Import directly from submodules as needed.
"""
from database.interface import DatabaseInterface

__all__ = ['DatabaseInterface']
84 changes: 64 additions & 20 deletions database/db.py
Original file line number Diff line number Diff line change
@@ -1,46 +1,90 @@
# """
# SQLAlchemy models for DashPVA profile management
# """
import logging
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
import settings

Base = declarative_base()
log = logging.getLogger(__name__)

Base = declarative_base()

# Database configuration using absolute path
# Get project root from the centralized settings module
PROJECT_ROOT = settings.PROJECT_ROOT
DB_FILE = PROJECT_ROOT / "dashpva.db"
DATABASE_URL = f"sqlite:///{DB_FILE.as_posix()}"

# Issue 5: module-level engine and session factory (not created per-call)
_engine = create_engine(
DATABASE_URL,
echo=False,
connect_args={"check_same_thread": False},
)
_Session = sessionmaker(bind=_engine, expire_on_commit=False)


def get_engine():
"""Create and return database engine"""
return create_engine(DATABASE_URL, echo=False)
"""Return the shared database engine."""
return _engine


def get_session():
"""Create and return database session"""
engine = get_engine()
Session = sessionmaker(bind=engine)
return Session()
"""Return a new session from the shared session factory."""
return _Session()


def create_tables():
"""Create all tables in the database"""
engine = get_engine()
Base.metadata.create_all(engine)
"""Create all tables in the database (idempotent via create_all)."""
# Late-import models so they register themselves with Base.metadata
import database.models.setting_value # noqa: F401
import database.models.settings # noqa: F401
import database.models.profile # noqa: F401
Base.metadata.create_all(_engine)
migrate_database()

def init_database():
"""Initialize the database with tables"""

def migrate_database():
"""Apply incremental column migrations to existing tables."""
import sqlite3
if not DB_FILE.exists():
create_tables()
# Seed default settings only on first creation using raw SQL script
return
conn = sqlite3.connect(str(DB_FILE))
try:
cursor = conn.cursor()
cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='profiles'")
if cursor.fetchone():
cursor.execute("PRAGMA table_info(profiles)")
existing_cols = {row[1] for row in cursor.fetchall()}
if 'is_default' not in existing_cols:
cursor.execute("ALTER TABLE profiles ADD COLUMN is_default BOOLEAN NOT NULL DEFAULT 0")
if 'is_selected' not in existing_cols:
cursor.execute("ALTER TABLE profiles ADD COLUMN is_selected BOOLEAN NOT NULL DEFAULT 0")
conn.commit()
finally:
conn.close()


_init_done = False


def init_database():
"""Initialize the database (tables + seed). No-op after the first successful call."""
global _init_done
if _init_done:
return
# Issue 1: always create tables so new models get their tables on existing DBs
is_new_db = not DB_FILE.exists()
create_tables()
if is_new_db:
# Issue 7: log failures instead of silently swallowing them
try:
from scripts.seed_settings_defaults_sql import seed_defaults
seed_defaults()
except Exception:
# Seed script may be absent; ignore errors per original behavior
pass
print("Database initialized successfully")
else:
print("Database already exists")
except Exception as exc:
log.warning("seed_defaults() failed on new database: %s", exc)
# Issue 2: set flag only after all operations succeed
_init_done = True
log.debug("Database initialized (new_db=%s)", is_new_db)
Loading