Skip to content

Commit 80041d6

Browse files
QuakeString and claude committed
Remove MAX_VARS read limit — optimization pipeline handles any item count
The MAX_VARS=20 limit was inherited from the original snap7 C library's fixed-size array. Our pure Python pipeline sorts, merges, and packetizes any number of items into PDU-sized packets automatically. The limit is retained for write_multi_vars where no optimization exists. Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent 524be95 commit 80041d6

File tree

2 files changed

+51
-19
lines changed

2 files changed

+51
-19
lines changed

snap7/client.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -560,21 +560,19 @@ def read_multi_vars(self, items: Union[List[dict[str, Any]], "Array[S7DataItem]"
560560
addresses, and packs them into minimal PDU-sized multi-item S7
561561
requests. CT/TM areas fall back to individual reads.
562562
563+
There is no hard limit on the number of items — the optimization
564+
pipeline automatically packs them into PDU-sized packets and
565+
dispatches in parallel.
566+
563567
Args:
564568
items: List of item specifications or S7DataItem array
565569
566570
Returns:
567571
Tuple of (result, items with data)
568-
569-
Raises:
570-
ValueError: If more than MAX_VARS items are requested
571572
"""
572573
if not items:
573574
return (0, items)
574575

575-
if len(items) > self.MAX_VARS:
576-
raise ValueError(f"Too many items: {len(items)} exceeds MAX_VARS ({self.MAX_VARS})")
577-
578576
# Handle S7DataItem array (ctypes)
579577
if hasattr(items, "_type_") and hasattr(items[0], "Area"):
580578
s7_items = cast("Array[S7DataItem]", items)

tests/test_client.py

Lines changed: 47 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1267,15 +1267,35 @@ class TestMaxVars:
12671267
def test_max_vars_constant(self) -> None:
12681268
assert Client.MAX_VARS == 20
12691269

1270-
def test_read_multi_vars_rejects_too_many(self) -> None:
1270+
def test_read_multi_vars_no_item_limit(self) -> None:
1271+
"""read_multi_vars should accept any number of items (pipeline handles packing)."""
12711272
client = Client()
12721273
client.connected = True
12731274
mock_conn = MagicMock()
1275+
mock_conn.timeout = 5.0
12741276
client.connection = mock_conn
12751277

1276-
items = [{"area": Area.DB, "db_number": 1, "start": i, "size": 1} for i in range(21)]
1277-
with pytest.raises(ValueError, match="Too many items.*21.*MAX_VARS.*20"):
1278-
client.read_multi_vars(items)
1278+
mock_conn.receive_data = MagicMock(return_value=b"\x00" * 20)
1279+
mock_conn.data_available = MagicMock(return_value=True)
1280+
1281+
sent_seqs: list[int] = []
1282+
1283+
def track_send(pdu: bytes) -> None:
1284+
sent_seqs.append(struct.unpack(">H", pdu[4:6])[0])
1285+
1286+
mock_conn.send_data = track_send
1287+
seq_iter = iter(lambda: sent_seqs.pop(0) if sent_seqs else 0, -1)
1288+
1289+
def mock_parse(data: bytes) -> dict[str, Any]:
1290+
return {"raw_data_section": b"", "parameters": {"item_count": 1}, "sequence": next(seq_iter)}
1291+
1292+
client.protocol.parse_response = mock_parse # type: ignore[assignment]
1293+
client.protocol.extract_multi_read_data = MagicMock(side_effect=lambda resp, count: [bytearray(1)] * count)
1294+
1295+
items = [{"area": Area.DB, "db_number": 1, "start": i, "size": 1} for i in range(100)]
1296+
result_code, results = client.read_multi_vars(items)
1297+
assert result_code == 0
1298+
assert len(results) == 100
12791299

12801300
def test_write_multi_vars_rejects_too_many(self) -> None:
12811301
client = Client()
@@ -1287,26 +1307,40 @@ def test_write_multi_vars_rejects_too_many(self) -> None:
12871307
with pytest.raises(ValueError, match="Too many items.*21.*MAX_VARS.*20"):
12881308
client.write_multi_vars(items)
12891309

1290-
def test_read_multi_vars_accepts_max(self) -> None:
1291-
"""20 items (the limit) should not raise."""
1310+
def test_read_multi_vars_large_batch(self) -> None:
1311+
"""read_multi_vars should handle large batches via the optimization pipeline."""
12921312
client = Client()
12931313
client.connected = True
12941314
mock_conn = MagicMock()
1315+
mock_conn.timeout = 5.0
12951316
client.connection = mock_conn
12961317

1297-
# Mock connection-level send/receive and protocol parsing
12981318
mock_conn.send_data = MagicMock()
12991319
mock_conn.receive_data = MagicMock(return_value=b"\x00" * 20)
13001320
mock_conn.data_available = MagicMock(return_value=True)
1301-
client.protocol.parse_response = MagicMock(
1302-
return_value={"raw_data_section": b"", "parameters": {"item_count": 1}, "sequence": 0}
1303-
)
1304-
client.protocol.extract_multi_read_data = MagicMock(side_effect=lambda resp, count: [bytearray(1)] * count)
13051321

1306-
items = [{"area": Area.DB, "db_number": 1, "start": i, "size": 1} for i in range(20)]
1322+
# Track sequence numbers from sent PDUs to echo them back
1323+
sent_seqs: list[int] = []
1324+
original_send = mock_conn.send_data
1325+
1326+
def track_send(pdu: bytes) -> None:
1327+
seq = struct.unpack(">H", pdu[4:6])[0]
1328+
sent_seqs.append(seq)
1329+
original_send(pdu)
1330+
1331+
mock_conn.send_data = track_send
1332+
seq_iter = iter(lambda: sent_seqs.pop(0) if sent_seqs else 0, -1)
1333+
1334+
def mock_parse(data: bytes) -> dict[str, Any]:
1335+
return {"raw_data_section": b"", "parameters": {"item_count": 1}, "sequence": next(seq_iter)}
1336+
1337+
client.protocol.parse_response = mock_parse # type: ignore[assignment]
1338+
client.protocol.extract_multi_read_data = MagicMock(side_effect=lambda resp, count: [bytearray(4)] * count)
1339+
1340+
items = [{"area": Area.DB, "db_number": 1, "start": i * 4, "size": 4} for i in range(500)]
13071341
result_code, results = client.read_multi_vars(items)
13081342
assert result_code == 0
1309-
assert len(results) == 20
1343+
assert len(results) == 500
13101344

13111345
def test_write_multi_vars_accepts_max(self) -> None:
13121346
"""20 items (the limit) should not raise."""

0 commit comments

Comments (0)