Skip to content
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
Show all changes
30 commits
Select commit Hold shift + click to select a range
5778c87
got it working
timmarkhuff Mar 12, 2026
240f124
Automatically reformatting code
Mar 12, 2026
ab42ece
adding DetectorsConfig model
Mar 16, 2026
83fe037
Automatically reformatting code
Mar 16, 2026
3d2895d
removing unnecessary script
Mar 16, 2026
5ca292e
Merge branch 'tim/edge-config' of github.com:groundlight/python-sdk i…
Mar 16, 2026
7c2b321
fixing a linter error
Mar 16, 2026
ddb26a1
Automatically reformatting code
Mar 16, 2026
2f8a474
responding to AI PR feedback
Mar 16, 2026
b3a7f66
Merge branch 'tim/edge-config' of github.com:groundlight/python-sdk i…
Mar 16, 2026
8fcebb9
Automatically reformatting code
Mar 16, 2026
45feffa
responding to more AI PR feedback
Mar 16, 2026
18a33b2
Merge remote updates into tim/edge-config.
Mar 16, 2026
dbe714f
Automatically reformatting code
Mar 16, 2026
0fbd05e
code cleanup
Mar 17, 2026
cb9394a
Merge branch 'tim/edge-config' of github.com:groundlight/python-sdk i…
Mar 17, 2026
21c5cd8
Automatically reformatting code
Mar 17, 2026
a4469a8
Merge branch 'main' into tim/edge-config
timmarkhuff Mar 17, 2026
963b35b
more code cleanup
Mar 17, 2026
53af849
Merge origin/tim/edge-config into tim/edge-config
Mar 17, 2026
9c06701
Automatically reformatting code
Mar 17, 2026
db1d86b
responding to PR feedback
Mar 17, 2026
8d4a491
Merge branch 'tim/edge-config' of github.com:groundlight/python-sdk i…
Mar 17, 2026
79121f8
addressing linter error
Mar 17, 2026
418eecf
Automatically reformatting code
Mar 17, 2026
b0b53db
code cleanup
timmarkhuff Mar 18, 2026
bf0c284
Automatically reformatting code
Mar 18, 2026
d0608a1
responding to PR feedback
timmarkhuff Mar 18, 2026
83d8f75
addressing linter complaint
timmarkhuff Mar 18, 2026
8505790
Automatically reformatting code
Mar 18, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
41 changes: 41 additions & 0 deletions edge_config_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
"""Example of constructing an edge endpoint configuration programmatically."""
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is temporary, I'll get rid of this before merging.

Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Getting rid of it is too easy — grab the most important snippets and put them in a Markdown file under docs; we auto-push those to a webpage.


from groundlight import Groundlight
from groundlight.edge import DEFAULT, EDGE_WITH_ESCALATION, NO_CLOUD, EdgeInferenceConfig, RootEdgeConfig

# Fetch the detectors that will run on the edge endpoint.
gl = Groundlight()
detector_ids = [
    "det_2z41nK0CyoFdWF6tEoB7DN5qwAx",
    "det_2z41rs0Fo12LAk0oOZg0r4wR9Fn",
    "det_2tYVTZrz8VLZhe94tjuPRl5rDeG",
    "det_2sDfBz5xp6ZysB82kK7LfNYYSXx",
    "det_2sDfGUP8cBt9Wrq0YFVLjVZhoI5",
]
detector1, detector2, detector3, detector4, detector5 = (gl.get_detector(det_id) for det_id in detector_ids)

edge_config = RootEdgeConfig()

# Preset inference configs can be attached directly.
edge_config.add_detector(detector1, NO_CLOUD)
edge_config.add_detector(detector2, EDGE_WITH_ESCALATION)
edge_config.add_detector(detector3, DEFAULT)

# Custom configs work alongside presets
custom_config = EdgeInferenceConfig(
    name="my_custom_config",
    always_return_edge_prediction=True,
    min_time_between_escalations=0.5,
)
edge_config.add_detector(detector4.id, custom_config)

# Cannot reuse names on EdgeInferenceConfig
colliding_config = EdgeInferenceConfig(name="default")
try:
    edge_config.add_detector(detector5, colliding_config)
except ValueError as err:
    print(err)

# Frozen -- mutation raises an error
try:
    NO_CLOUD.enabled = False
except Exception as err:
    print(err)

print(edge_config.model_dump_json(indent=2))
21 changes: 21 additions & 0 deletions src/groundlight/edge/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
from .config import (
DEFAULT,
DISABLED,
EDGE_WITH_ESCALATION,
NO_CLOUD,
DetectorConfig,
EdgeInferenceConfig,
GlobalConfig,
RootEdgeConfig,
)

# Explicitly declare the public API of this subpackage; keep in sync with the
# names imported from .config above.
__all__ = [
    "DEFAULT",
    "DISABLED",
    "EDGE_WITH_ESCALATION",
    "NO_CLOUD",
    "DetectorConfig",
    "EdgeInferenceConfig",
    "GlobalConfig",
    "RootEdgeConfig",
]
134 changes: 134 additions & 0 deletions src/groundlight/edge/config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,134 @@
from typing import Optional, Union

from model import Detector
from pydantic import BaseModel, ConfigDict, Field, model_validator
from typing_extensions import Self


class GlobalConfig(BaseModel):
    """Settings shared across the whole edge deployment, as opposed to the
    per-detector options in `EdgeInferenceConfig`."""

    refresh_rate: float = Field(
        default=60.0,
        description="The interval (in seconds) at which the inference server checks for a new model binary update.",
    )
    confident_audit_rate: float = Field(
        default=1e-5,  # A detector running at 1 FPS = ~100,000 IQ/day, so 1e-5 is ~1 confident IQ/day audited
        description="The probability that any given confident prediction will be sent to the cloud for auditing.",
    )


class EdgeInferenceConfig(BaseModel):
    """
    Configuration for edge inference on a specific detector.

    Instances are frozen, so shared presets (e.g. ``DEFAULT``, ``NO_CLOUD``)
    cannot be mutated after creation.

    Raises:
        ValueError: if `disable_cloud_escalation` is set without
            `always_return_edge_prediction`, or if
            `min_time_between_escalations` is negative.
    """

    model_config = ConfigDict(frozen=True)

    name: str = Field(..., exclude=True, description="A unique name for this inference config preset.")
    enabled: bool = Field(  # TODO investigate and update the functionality of this option
        default=True, description="Whether the edge endpoint should accept image queries for this detector."
    )
    # `Optional[str]` rather than `str | None`: keeps annotations consistent with the
    # `typing.Union` style used elsewhere in this module, and PEP 604 pipe-unions in
    # pydantic field annotations require Python 3.10+.
    api_token: Optional[str] = Field(
        default=None, description="API token used to fetch the inference model for this detector."
    )
    always_return_edge_prediction: bool = Field(
        default=False,
        description=(
            "Indicates if the edge-endpoint should always provide edge ML predictions, regardless of confidence. "
            "When this setting is true, whether or not the edge-endpoint should escalate low-confidence predictions "
            "to the cloud is determined by `disable_cloud_escalation`."
        ),
    )
    disable_cloud_escalation: bool = Field(
        default=False,
        description=(
            "Never escalate ImageQueries from the edge-endpoint to the cloud. "
            "Requires `always_return_edge_prediction=True`."
        ),
    )
    min_time_between_escalations: float = Field(
        default=2.0,
        description=(
            "The minimum time (in seconds) to wait between cloud escalations for a given detector. "
            "Cannot be less than 0.0. "
            "Only applies when `always_return_edge_prediction=True` and `disable_cloud_escalation=False`."
        ),
    )

    @model_validator(mode="after")
    def validate_configuration(self) -> Self:
        """Reject flag combinations that are internally inconsistent."""
        # Disabling escalation only makes sense when edge predictions are always returned;
        # otherwise low-confidence queries would have no path to an answer.
        if self.disable_cloud_escalation and not self.always_return_edge_prediction:
            raise ValueError(
                "The `disable_cloud_escalation` flag is only valid when `always_return_edge_prediction` is set to True."
            )
        if self.min_time_between_escalations < 0.0:
            raise ValueError("`min_time_between_escalations` cannot be less than 0.0.")
        return self


class DetectorConfig(BaseModel):
    """
    Binds a detector to a named edge-inference config.
    """

    detector_id: str = Field(..., description="Detector ID")
    # Holds the *name* of an EdgeInferenceConfig (a key in
    # RootEdgeConfig.edge_inference_configs), not the config object itself.
    edge_inference_config: str = Field(..., description="Config for edge inference.")


class RootEdgeConfig(BaseModel):
    """
    Root configuration for edge inference.

    Holds endpoint-wide settings, a registry of named inference configs, and
    the list of detectors bound to those configs.
    """

    global_config: GlobalConfig = Field(default_factory=GlobalConfig)
    edge_inference_configs: dict[str, EdgeInferenceConfig] = Field(default_factory=dict)
    detectors: list[DetectorConfig] = Field(default_factory=list)

    @model_validator(mode="after")
    def validate_inference_configs(self):
        """Ensure every detector references a registered inference config name."""
        for entry in self.detectors:
            if entry.edge_inference_config not in self.edge_inference_configs:
                raise ValueError(f"Edge inference config '{entry.edge_inference_config}' not defined.")
        return self

    def add_detector(
        self, detector: Union[str, Detector], edge_inference_config: Union[str, EdgeInferenceConfig]
    ) -> None:
        """Attach a detector (by object or ID) to an inference config.

        The config may be given as an `EdgeInferenceConfig` (registered under
        its name on first use) or as the name of an already-registered config.
        Raises ValueError on duplicate detector IDs, on a name collision with a
        different config object, or on an unknown config name.
        """
        if isinstance(detector, Detector):
            resolved_id = detector.id
        else:
            resolved_id = detector

        # Each detector may only appear once in the config.
        for existing_entry in self.detectors:
            if existing_entry.detector_id == resolved_id:
                raise ValueError(f"A detector with ID '{resolved_id}' already exists.")

        if isinstance(edge_inference_config, EdgeInferenceConfig):
            chosen_name = edge_inference_config.name
            registered = self.edge_inference_configs.get(chosen_name)
            if registered is None:
                # First time this name is seen: register the config object.
                self.edge_inference_configs[chosen_name] = edge_inference_config
            elif registered is not edge_inference_config:
                # Identity check: re-passing the same object is fine; a different
                # object under the same name is a collision.
                raise ValueError(f"A different inference config named '{chosen_name}' is already registered.")
        else:
            chosen_name = edge_inference_config
            if chosen_name not in self.edge_inference_configs:
                raise ValueError(
                    f"Edge inference config '{chosen_name}' not defined. "
                    f"Available configs: {list(self.edge_inference_configs.keys())}"
                )

        self.detectors.append(
            DetectorConfig(
                detector_id=resolved_id,
                edge_inference_config=chosen_name,
            )
        )


# Preset inference configs matching the standard edge-endpoint defaults.
# All presets are frozen (EdgeInferenceConfig sets ConfigDict(frozen=True)),
# so they can be shared safely across RootEdgeConfig instances.
DEFAULT = EdgeInferenceConfig(name="default")  # all field defaults
EDGE_WITH_ESCALATION = EdgeInferenceConfig(
    name="edge_with_escalation",
    always_return_edge_prediction=True,
    min_time_between_escalations=2.0,  # same as the field default, stated explicitly
)
NO_CLOUD = EdgeInferenceConfig(
    name="no_cloud",
    always_return_edge_prediction=True,
    disable_cloud_escalation=True,  # never escalate ImageQueries to the cloud
)
DISABLED = EdgeInferenceConfig(name="disabled", enabled=False)  # endpoint won't accept queries for this detector