1- import base64
21import datetime
32import json
43import enum
@@ -38,39 +37,58 @@ class PipelineRunAnnotationSystemKey(enum.StrEnum):
3837}
3938
4039# ---------------------------------------------------------------------------
41- # Page-token helpers
40+ # Cursor encode / decode
4241# ---------------------------------------------------------------------------
4342
CURSOR_SEPARATOR: Final[str] = "~"


def encode_cursor(created_at: datetime.datetime, run_id: str) -> str:
    """Serialize a row position into an ``<iso-timestamp>~<run-id>`` cursor.

    PipelineRun.created_at is stored naive (no UtcDateTime decorator on the
    column), so a naive value is stamped with an explicit UTC offset before
    formatting; this keeps the cursor string timezone-explicit, and
    decode_cursor() strips the offset back off for DB comparison.
    """
    stamped = (
        created_at
        if created_at.tzinfo is not None
        else created_at.replace(tzinfo=datetime.timezone.utc)
    )
    return CURSOR_SEPARATOR.join((stamped.isoformat(), run_id))
5257
53- def _decode_page_token (* , page_token : str | None ) -> dict [str , Any ]:
54- return json .loads (base64 .b64decode (page_token )) if page_token else {}
5558
59+ def decode_cursor (cursor : str | None ) -> tuple [datetime .datetime , str ] | None :
60+ """Parse a tilde-separated cursor string into (created_at, run_id).
5661
57- def _resolve_filter_value (
58- * ,
59- filter : str | None ,
60- filter_query : str | None ,
61- page_token : str | None ,
62- ) -> tuple [str | None , str | None , int ]:
63- """Decode page_token and return the effective (filter_value, filter_query_value, offset).
64-
65- If a page_token is present, its stored values take precedence over the
66- raw parameters (the token carries resolved values forward across pages).
62+ Returns None for empty/missing cursors. Raises ApiValidationError
63+ for unrecognized formats (e.g. legacy base64 tokens).
6764 """
68- page_token_dict = _decode_page_token (page_token = page_token )
69- offset = page_token_dict .get (_PAGE_TOKEN_OFFSET_KEY , 0 )
70- if page_token :
71- filter = page_token_dict .get (_PAGE_TOKEN_FILTER_KEY )
72- filter_query = page_token_dict .get (_PAGE_TOKEN_FILTER_QUERY_KEY )
73- return filter , filter_query , offset
65+ if not cursor :
66+ return None
67+ if CURSOR_SEPARATOR not in cursor :
68+ raise errors .ApiValidationError (
69+ f"Unrecognized page_token format. "
70+ f"Expected 'created_at~id' cursor. token={ cursor [:20 ]} ... (truncated)"
71+ )
72+ # maxsplit=1: split on first ~ only, so run_id can safely contain ~
73+ created_at_str , run_id = cursor .split (CURSOR_SEPARATOR , 1 )
74+ created_at = datetime .datetime .fromisoformat (created_at_str )
75+ # Normalize to naive UTC to match DB storage format (PipelineRun.created_at
76+ # is plain DateTime, not UtcDateTime -- stores/returns naive datetimes).
77+ if created_at .tzinfo is not None :
78+ created_at = created_at .astimezone (datetime .timezone .utc ).replace (tzinfo = None )
79+ return created_at , run_id
80+
81+
def maybe_next_page_token(
    *,
    rows: list[bts.PipelineRun],
    page_size: int,
) -> str | None:
    """Return a cursor token for the next page, or None if this is the last page."""
    # A short page means the result set is exhausted -- nothing follows it.
    if len(rows) < page_size:
        return None
    anchor = rows[page_size - 1]
    return encode_cursor(anchor.created_at, anchor.id)
7492
7593
7694# ---------------------------------------------------------------------------
@@ -154,25 +172,15 @@ def build_list_filters(
154172 * ,
155173 filter_value : str | None ,
156174 filter_query_value : str | None ,
157- page_token_value : str | None ,
175+ cursor_value : str | None ,
158176 current_user : str | None ,
159- page_size : int ,
160- ) -> tuple [list [sql .ColumnElement ], int , str ]:
161- """Resolve pagination token, legacy filter, and filter_query into WHERE clauses.
162-
163- Returns (where_clauses, offset, next_page_token_encoded).
164- """
177+ ) -> list [sql .ColumnElement ]:
178+ """Build WHERE clauses from filters and cursor."""
165179 if filter_value and filter_query_value :
166180 raise errors .ApiValidationError (
167181 "Cannot use both 'filter' and 'filter_query'. Use one or the other."
168182 )
169183
170- filter_value , filter_query_value , offset = _resolve_filter_value (
171- filter = filter_value ,
172- filter_query = filter_query_value ,
173- page_token = page_token_value ,
174- )
175-
176184 if filter_value :
177185 filter_query_value = _convert_legacy_filter_to_filter_query (
178186 filter_value = filter_value ,
@@ -188,14 +196,18 @@ def build_list_filters(
188196 )
189197 )
190198
191- next_page_token = _encode_page_token (
192- page_token_dict = {
193- _PAGE_TOKEN_OFFSET_KEY : offset + page_size ,
194- _PAGE_TOKEN_FILTER_QUERY_KEY : filter_query_value ,
195- }
196- )
199+ cursor = decode_cursor (cursor_value )
200+ if cursor :
201+ cursor_created_at , cursor_id = cursor
202+ where_clauses .append (
203+ sql .tuple_ (bts .PipelineRun .created_at , bts .PipelineRun .id )
204+ < sql .tuple_ (
205+ sql .literal (cursor_created_at ),
206+ sql .literal (cursor_id ),
207+ )
208+ )
197209
198- return where_clauses , offset , next_page_token
210+ return where_clauses
199211
200212
201213def filter_query_to_where_clause (
0 commit comments