Skip to content

Commit 23f58f6

Browse files
committed
pre-commit fixed
1 parent 923bcb9 commit 23f58f6

5 files changed

Lines changed: 148 additions & 17 deletions

File tree

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,31 @@
1+
"""Adjust PV address uniqueness constraints.
2+
3+
Revision ID: 6c1c0c2f8a1b
4+
Revises: 61476c608fc3
5+
Create Date: 2026-03-10 00:00:00.000000
6+
"""
7+
from collections.abc import Sequence
8+
9+
from alembic import op
10+
11+
# revision identifiers, used by Alembic.
12+
revision: str = "6c1c0c2f8a1b"
13+
down_revision: str | None = "61476c608fc3"
14+
branch_labels: str | Sequence[str] | None = None
15+
depends_on: str | Sequence[str] | None = None
16+
17+
18+
def upgrade() -> None:
    """Relax uniqueness on the PV readback/config address indexes.

    Drops each existing unique index and recreates it as a plain
    (non-unique) index, allowing the same readback or config address to
    appear on multiple PV rows.
    """
    for column_name in ("readback_address", "config_address"):
        index_name = op.f(f"ix_pv_{column_name}")
        op.drop_index(index_name, table_name="pv")
        op.create_index(index_name, "pv", [column_name], unique=False)
24+
25+
26+
def downgrade() -> None:
    """Restore uniqueness on the PV readback/config address indexes.

    Reverses :func:`upgrade` by dropping the non-unique indexes and
    recreating them as unique ones. Fails if duplicate addresses were
    inserted while the relaxed indexes were in place.
    """
    for column_name in ("readback_address", "config_address"):
        index_name = op.f(f"ix_pv_{column_name}")
        op.drop_index(index_name, table_name="pv")
        op.create_index(index_name, "pv", [column_name], unique=True)

app/models/pv.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -26,8 +26,8 @@ class PV(Base, UUIDMixin, TimestampMixin):
2626

2727
# PV addresses (at least one required - enforced at service layer)
2828
setpoint_address: Mapped[str | None] = mapped_column(String(255), unique=True, nullable=True, index=True)
29-
readback_address: Mapped[str | None] = mapped_column(String(255), unique=True, nullable=True, index=True)
30-
config_address: Mapped[str | None] = mapped_column(String(255), unique=True, nullable=True, index=True)
29+
readback_address: Mapped[str | None] = mapped_column(String(255), nullable=True, index=True)
30+
config_address: Mapped[str | None] = mapped_column(String(255), nullable=True, index=True)
3131

3232
# Metadata
3333
device: Mapped[str | None] = mapped_column(String(255), nullable=True, index=True)

app/repositories/pv_repository.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,28 @@ async def find_by_address(self, address: str) -> PV | None:
2222
)
2323
return result.scalar_one_or_none()
2424

25+
async def find_by_setpoint(self, setpoint_address: str) -> PV | None:
    """Find PV by setpoint address only.

    Eagerly loads each PV's tags (and every tag's group) so later DTO
    mapping does not trigger lazy loads. Returns ``None`` when no row
    matches the exact address.
    """
    stmt = (
        select(PV)
        .options(selectinload(PV.tags).selectinload(Tag.group))
        .where(PV.setpoint_address == setpoint_address)
    )
    result = await self.session.execute(stmt)
    return result.scalar_one_or_none()
33+
34+
async def get_existing_setpoints(self, setpoint_addresses: list[str]) -> set[str]:
    """Return set of setpoint addresses that already exist.

    Args:
        setpoint_addresses: Candidate setpoint addresses; may contain
            duplicates — they are collapsed before querying.

    Returns:
        The subset of the given addresses already present in the ``pv``
        table. Empty set for empty input.
    """
    if not setpoint_addresses:
        return set()
    # De-duplicate (order-preserving) so repeated inputs don't inflate
    # the number of bound IN() parameters per batch.
    unique_addresses = list(dict.fromkeys(setpoint_addresses))
    # PostgreSQL limit is 32767 parameters, use a safe batch size.
    batch_size = 30000
    existing: set[str] = set()
    for start in range(0, len(unique_addresses), batch_size):
        batch = unique_addresses[start : start + batch_size]
        result = await self.session.execute(
            select(PV.setpoint_address).where(PV.setpoint_address.in_(batch))
        )
        existing.update(row[0] for row in result.all() if row[0])
    return existing
46+
2547
async def search_by_name(
2648
self,
2749
search: str | None = None,

app/services/pv_service.py

Lines changed: 59 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
from sqlalchemy.exc import IntegrityError
12
from sqlalchemy.ext.asyncio import AsyncSession
23

34
from app.models.pv import PV
@@ -33,6 +34,14 @@ def _to_dto(self, pv: PV) -> PVElementDTO:
3334
lastModifiedDate=pv.updated_at,
3435
)
3536

37+
@staticmethod
38+
def _normalize_address(address: str | None) -> str | None:
39+
"""Normalize optional PV addresses from API payloads."""
40+
if address is None:
41+
return None
42+
normalized = address.strip()
43+
return normalized or None
44+
3645
async def search_paged(
3746
self,
3847
search: str | None = None,
@@ -61,12 +70,17 @@ async def get_by_id(self, pv_id: str) -> PVElementDTO | None:
6170

6271
async def create(self, data: NewPVElementDTO) -> PVElementDTO:
6372
"""Create a new PV."""
73+
setpoint_address = self._normalize_address(data.setpointAddress)
74+
readback_address = self._normalize_address(data.readbackAddress)
75+
config_address = self._normalize_address(data.configAddress)
76+
if not any([setpoint_address, readback_address, config_address]):
77+
raise ValueError("At least one address (setpoint, readback, or config) is required")
78+
6479
# Check for duplicate addresses
65-
for address in [data.setpointAddress, data.readbackAddress, data.configAddress]:
66-
if address:
67-
existing = await self.pv_repo.find_by_address(address)
68-
if existing:
69-
raise ValueError(f"PV with address '{address}' already exists")
80+
if setpoint_address:
81+
existing = await self.pv_repo.find_by_setpoint(setpoint_address)
82+
if existing:
83+
raise ValueError(f"PV with setpoint address '{setpoint_address}' already exists")
7084

7185
# Get tags
7286
tags = []
@@ -76,9 +90,9 @@ async def create(self, data: NewPVElementDTO) -> PVElementDTO:
7690
raise ValueError("One or more tag IDs are invalid")
7791

7892
pv = PV(
79-
setpoint_address=data.setpointAddress,
80-
readback_address=data.readbackAddress,
81-
config_address=data.configAddress,
93+
setpoint_address=setpoint_address,
94+
readback_address=readback_address,
95+
config_address=config_address,
8296
device=data.device,
8397
description=data.description,
8498
abs_tolerance=data.absTolerance,
@@ -87,14 +101,41 @@ async def create(self, data: NewPVElementDTO) -> PVElementDTO:
87101
tags=tags,
88102
)
89103

90-
pv = await self.pv_repo.create(pv)
104+
try:
105+
pv = await self.pv_repo.create(pv)
106+
except IntegrityError as e:
107+
raise ValueError(f"PV addresses violate uniqueness constraints: {e}") from e
91108
return self._to_dto(pv)
92109

93110
async def create_many(self, data_list: list[NewPVElementDTO]) -> list[PVElementDTO]:
94111
"""Bulk create PVs."""
112+
normalized_records: list[tuple[str | None, str | None, str | None, NewPVElementDTO]] = []
113+
for data in data_list:
114+
setpoint_address = self._normalize_address(data.setpointAddress)
115+
readback_address = self._normalize_address(data.readbackAddress)
116+
config_address = self._normalize_address(data.configAddress)
117+
if not any([setpoint_address, readback_address, config_address]):
118+
raise ValueError("At least one address (setpoint, readback, or config) is required")
119+
normalized_records.append((setpoint_address, readback_address, config_address, data))
120+
121+
setpoints = [r[0] for r in normalized_records if r[0]]
122+
seen = set()
123+
duplicate_setpoints = set()
124+
for s in setpoints:
125+
if s in seen:
126+
duplicate_setpoints.add(s)
127+
else:
128+
seen.add(s)
129+
if duplicate_setpoints:
130+
raise ValueError(f"Duplicate setpoint addresses in import: {sorted(duplicate_setpoints)[:10]}")
131+
132+
existing_setpoints = await self.pv_repo.get_existing_setpoints(setpoints)
133+
if existing_setpoints:
134+
raise ValueError(f"Setpoint addresses already exist: {sorted(existing_setpoints)[:10]}")
135+
95136
# Collect all tag IDs
96137
all_tag_ids = set()
97-
for data in data_list:
138+
for _, _, _, data in normalized_records:
98139
all_tag_ids.update(data.tags)
99140

100141
# Fetch all tags at once
@@ -105,12 +146,12 @@ async def create_many(self, data_list: list[NewPVElementDTO]) -> list[PVElementD
105146

106147
# Create PV objects
107148
pvs = []
108-
for data in data_list:
149+
for setpoint_address, readback_address, config_address, data in normalized_records:
109150
pv_tags = [tags_by_id[tid] for tid in data.tags if tid in tags_by_id]
110151
pv = PV(
111-
setpoint_address=data.setpointAddress,
112-
readback_address=data.readbackAddress,
113-
config_address=data.configAddress,
152+
setpoint_address=setpoint_address,
153+
readback_address=readback_address,
154+
config_address=config_address,
114155
device=data.device,
115156
description=data.description,
116157
abs_tolerance=data.absTolerance,
@@ -121,7 +162,10 @@ async def create_many(self, data_list: list[NewPVElementDTO]) -> list[PVElementD
121162
pvs.append(pv)
122163

123164
# Bulk insert
124-
pvs = await self.pv_repo.bulk_create(pvs)
165+
try:
166+
pvs = await self.pv_repo.bulk_create(pvs)
167+
except IntegrityError as e:
168+
raise ValueError(f"PV addresses violate uniqueness constraints: {e}") from e
125169
return [self._to_dto(pv) for pv in pvs]
126170

127171
async def update(self, pv_id: str, data: UpdatePVElementDTO) -> PVElementDTO | None:

tests/test_api/test_pvs.py

Lines changed: 34 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -110,6 +110,40 @@ async def test_bulk_create_empty_list(self, client: AsyncClient):
110110
data = response.json()
111111
assert data["payload"] == []
112112

113+
@pytest.mark.asyncio
async def test_bulk_create_allows_blank_optional_addresses(self, client: AsyncClient):
    """Blank optional addresses from CSV imports should be treated as NULL."""
    payload = [
        {"setpointAddress": "CSV:PV:1", "readbackAddress": "", "configAddress": "", "device": "CSV-1"},
        {"setpointAddress": "CSV:PV:2", "readbackAddress": "", "configAddress": "", "device": "CSV-2"},
    ]

    response = await client.post("/v1/pvs/multi", json=payload)

    assert response.status_code == 200
    body = response.json()
    assert body["errorCode"] == 0
    assert len(body["payload"]) == 2
    # Every created PV must have had its blank addresses coerced to NULL.
    for created in body["payload"]:
        assert created["readbackAddress"] is None
        assert created["configAddress"] is None
131+
132+
@pytest.mark.asyncio
async def test_bulk_create_allows_duplicate_readback(self, client: AsyncClient):
    """Readback addresses can repeat across different setpoints."""
    payload = [
        {"setpointAddress": "CSV:PV:3", "readbackAddress": "DUP:RB:1", "device": "CSV-3"},
        {"setpointAddress": "CSV:PV:4", "readbackAddress": "DUP:RB:1", "device": "CSV-4"},
    ]

    response = await client.post("/v1/pvs/multi", json=payload)

    assert response.status_code == 200
    body = response.json()
    assert body["errorCode"] == 0
    assert len(body["payload"]) == 2
146+
113147

114148
class TestPVSearch:
115149
"""Tests for PV search endpoints."""

0 commit comments

Comments
 (0)