Skip to content

Commit 3ff1cde

Browse files
committed
sequential pagination
1 parent 0569998 commit 3ff1cde

2 files changed

Lines changed: 19 additions & 80 deletions

File tree

src/trustshell/__init__.py

Lines changed: 18 additions & 79 deletions
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,8 @@
33
import logging
44
import os
55
import sys
6-
import asyncio
76
from urllib.parse import urlparse, urlunparse, quote, parse_qs
8-
from typing import Optional, Any, cast
7+
from typing import Optional, Any
98

109
import httpx
1110
import jwt
@@ -309,15 +308,15 @@ def launch_browser(code_challenge: str, state: str) -> None:
309308
webbrowser.open(url)
310309

311310

312-
async def paginated_trustify_query_async(
311+
def paginated_trustify_query(
313312
endpoint: str,
314313
base_params: dict[str, Any],
315314
auth_header: dict[str, str],
316315
component_name: str = "",
317316
limit: int = 100,
318317
) -> dict[str, Any]:
319318
"""
320-
Perform a paginated query to a Trustify API endpoint using parallel requests.
319+
Perform a paginated query to a Trustify API endpoint using sequential requests.
321320
322321
Args:
323322
endpoint: The API endpoint URL
@@ -330,12 +329,12 @@ async def paginated_trustify_query_async(
330329
dict with 'items' and 'total' keys containing all paginated results
331330
"""
332331

333-
async def make_request_with_retry(
334-
client: httpx.AsyncClient, query_params: dict[str, Any], headers: dict[str, str]
332+
def make_request_with_retry(
333+
client: httpx.Client, query_params: dict[str, Any], headers: dict[str, str]
335334
) -> httpx.Response:
336-
"""Make async HTTP request with 401 retry logic"""
335+
"""Make HTTP request with 401 retry logic"""
337336
try:
338-
response = await client.get(
337+
response = client.get(
339338
endpoint, params=query_params, headers=headers, timeout=2400
340339
)
341340
response.raise_for_status()
@@ -347,17 +346,17 @@ async def make_request_with_retry(
347346
new_access_token = get_access_token()
348347
if new_access_token:
349348
headers["Authorization"] = f"Bearer {new_access_token}"
350-
response = await client.get(
349+
response = client.get(
351350
endpoint, params=query_params, headers=headers, timeout=300
352351
)
353352
response.raise_for_status()
354353
return response
355354
raise
356355

357-
async with httpx.AsyncClient() as client:
356+
with httpx.Client() as client:
358357
# First request to get total count
359358
query_params = {**base_params, "limit": limit, "offset": 0}
360-
first_response = await make_request_with_retry(
359+
first_response = make_request_with_retry(
361360
client, query_params, auth_header
362361
)
363362
first_result = first_response.json()
@@ -370,51 +369,21 @@ async def make_request_with_retry(
370369

371370
all_items = first_result.get("items", [])
372371

373-
# Calculate remaining pages needed
374-
remaining_items = total_available - len(all_items)
375-
if remaining_items <= 0:
376-
# All items fit in first page
377-
if component_name:
378-
console.print(
379-
f"Retrieved {len(all_items)} items out of {total_available} total for {component_name}"
380-
)
381-
return {"items": all_items, "total": total_available}
382-
383-
# Calculate offsets for remaining pages
384-
remaining_pages = []
372+
# Fetch remaining pages sequentially
385373
offset = limit
386374
while offset < total_available:
387-
remaining_pages.append(offset)
388-
offset += limit
389-
390-
# Make parallel requests for remaining pages
391-
async def fetch_page(page_offset: int) -> list[Any]:
392-
"""Fetch a single page of results"""
393-
page_params = {**base_params, "limit": limit, "offset": page_offset}
375+
page_params = {**base_params, "limit": limit, "offset": offset}
394376
try:
395-
response = await make_request_with_retry(
377+
response = make_request_with_retry(
396378
client, page_params, auth_header
397379
)
398380
result = response.json()
399-
return cast(list[Any], result.get("items", []))
381+
page_items = result.get("items", [])
382+
all_items.extend(page_items)
383+
offset += limit
400384
except Exception as e:
401-
logger.error(f"Error fetching page at offset {page_offset}: {e}")
402-
return []
403-
404-
# Execute all remaining page requests in parallel
405-
if remaining_pages:
406-
page_results = await asyncio.gather(
407-
*[fetch_page(offset) for offset in remaining_pages],
408-
return_exceptions=True,
409-
)
410-
411-
# Combine results from all pages
412-
for page_items in page_results:
413-
if isinstance(page_items, list):
414-
all_items.extend(page_items)
415-
else:
416-
# Handle exceptions from gather
417-
logger.error(f"Error in parallel page fetch: {page_items}")
385+
logger.error(f"Error fetching page at offset {offset}: {e}")
386+
break
418387

419388
if component_name:
420389
console.print(
@@ -424,36 +393,6 @@ async def fetch_page(page_offset: int) -> list[Any]:
424393
return {"items": all_items, "total": total_available}
425394

426395

427-
def paginated_trustify_query(
428-
endpoint: str,
429-
base_params: dict[str, Any],
430-
auth_header: dict[str, str],
431-
component_name: str = "",
432-
limit: int = 100,
433-
) -> dict[str, Any]:
434-
"""
435-
Perform a paginated query to a Trustify API endpoint.
436-
437-
This is a synchronous wrapper around the async implementation that uses
438-
parallel requests for better performance.
439-
440-
Args:
441-
endpoint: The API endpoint URL
442-
base_params: Base query parameters (will add limit/offset)
443-
auth_header: Authentication headers
444-
component_name: Component name for progress messages (optional)
445-
limit: Number of items per request
446-
447-
Returns:
448-
dict with 'items' and 'total' keys containing all paginated results
449-
"""
450-
return asyncio.run(
451-
paginated_trustify_query_async(
452-
endpoint, base_params, auth_header, component_name, limit
453-
)
454-
)
455-
456-
457396
def render_tree(root: Node) -> None:
458397
"""Pretty print a tree using name only"""
459398
for pre, _, node in RenderTree(root):

uv.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)