Skip to content

Commit 1dccadb

Browse files
committed
Remove debug lock-finding code from orchestrator
The _find_locks() helper and its related debug diagnostics caused isinstance() errors when traversing complex objects. This code was only used for debugging pickle issues and is not needed in production. Removing it fixes CI failures in the OMERO tests, where lock detection recursively traversed module internals and failed on type checks.
1 parent fcb8524 commit 1dccadb

2 files changed

Lines changed: 1 addition & 96 deletions

File tree

openhcs/core/orchestrator/orchestrator.py

Lines changed: 0 additions & 95 deletions
Original file line numberDiff line numberDiff line change
@@ -50,54 +50,6 @@
5050
resolve_lazy_configurations_for_serialization,
5151
)
5252
from openhcs.microscopes import create_microscope_handler
53-
54-
55-
def _find_locks(obj, path="", seen=None):
56-
"""Recursively find threading locks in an object."""
57-
import threading
58-
59-
# Get lock types once
60-
lock_types = (
61-
threading.Lock,
62-
threading.RLock,
63-
threading.Semaphore,
64-
threading.BoundedSemaphore,
65-
threading.Condition,
66-
)
67-
68-
if seen is None:
69-
seen = set()
70-
71-
if id(obj) in seen:
72-
return []
73-
seen.add(id(obj))
74-
75-
locks = []
76-
77-
# Check if it's a lock
78-
try:
79-
if isinstance(obj, lock_types):
80-
locks.append((path, type(obj).__name__))
81-
except TypeError as e:
82-
logger.warning(f"isinstance check failed for {path}: {e}")
83-
84-
# Recursively check containers
85-
try:
86-
if isinstance(obj, dict):
87-
for key, val in obj.items():
88-
locks.extend(_find_locks(val, f"{path}[{repr(key)}]", seen))
89-
elif isinstance(obj, (list, tuple)):
90-
for idx, val in enumerate(obj):
91-
locks.extend(_find_locks(val, f"{path}[{idx}]", seen))
92-
elif hasattr(obj, "__dict__"):
93-
for attr_name, val in obj.__dict__.items():
94-
locks.extend(_find_locks(val, f"{path}.{attr_name}", seen))
95-
except (TypeError, AttributeError):
96-
pass
97-
98-
return locks
99-
100-
10153
from openhcs.microscopes.microscope_base import MicroscopeHandler
10254
from openhcs.processing.backends.analysis.consolidate_analysis_results import (
10355
consolidate_results_directories,
@@ -1413,53 +1365,6 @@ def execute_compiled_plate(
14131365
continue
14141366
owned_wells = list(worker_assignments[worker_slot])
14151367

1416-
# DEBUG: Check what's being pickled for locks
1417-
logger.info(f"DEBUG: Submitting worker {worker_slot}")
1418-
logger.info(
1419-
f"DEBUG: pipeline_definition has {len(pipeline_definition)} steps"
1420-
)
1421-
1422-
# Check pipeline_definition for locks
1423-
pd_locks = _find_locks(
1424-
pipeline_definition, "pipeline_definition"
1425-
)
1426-
if pd_locks:
1427-
logger.error(f"🔒 LOCKS IN pipeline_definition: {pd_locks}")
1428-
else:
1429-
logger.info(f"DEBUG: pipeline_definition is pickle-safe")
1430-
1431-
# Check lane_contexts for locks
1432-
lc_locks = _find_locks(lane_contexts, "lane_contexts")
1433-
if lc_locks:
1434-
logger.error(f"🔒 LOCKS IN lane_contexts: {lc_locks}")
1435-
else:
1436-
logger.info(f"DEBUG: lane_contexts is pickle-safe")
1437-
1438-
for i, step in enumerate(pipeline_definition):
1439-
func_attr = getattr(step, "func", None)
1440-
logger.info(
1441-
f"DEBUG: step[{i}].func = {type(func_attr).__name__ if func_attr else 'None'}"
1442-
)
1443-
logger.info(
1444-
f"DEBUG: lane_contexts has {len(lane_contexts)} axes"
1445-
)
1446-
for axis_id, axis_contexts in lane_contexts:
1447-
logger.info(
1448-
f"DEBUG: axis {axis_id} has {len(axis_contexts)} contexts"
1449-
)
1450-
for ctx_key, ctx in axis_contexts[
1451-
:1
1452-
]: # Just check first context
1453-
if hasattr(ctx, "step_plans"):
1454-
for step_idx, plan in list(ctx.step_plans.items())[
1455-
:2
1456-
]: # Just check first 2
1457-
if "func" in plan:
1458-
func_val = plan["func"]
1459-
logger.info(
1460-
f"DEBUG: context step_plans[{step_idx}]['func'] = {type(func_val).__name__}"
1461-
)
1462-
14631368
try:
14641369
future = executor.submit(
14651370
_execute_worker_lane_static,

0 commit comments

Comments
 (0)