@@ -5,6 +5,7 @@
     build_fetch_operation_name,
     build_operation_name,
     collect_paginated_results_async,
+    ensure_started_job_id,
     poll_until_terminal_status_async,
     retry_operation_async,
     wait_for_job_result_async,
@@ -21,7 +22,6 @@
     StartScrapeJobParams,
     StartScrapeJobResponse,
 )
-from ....exceptions import HyperbrowserError
 
 
 class BatchScrapeManager:
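
The duplicated start-guard now lives in the shared helper module imported above, so this module no longer needs its own `HyperbrowserError` import. The helper's body is not part of this commit; below is a minimal sketch consistent with the call sites further down, where the `Optional[str]` parameter type and the keyword-only `error_message` are assumptions inferred from how it is called:

from typing import Optional

from ....exceptions import HyperbrowserError  # mirrors the import removed above; adjust to the module where the helper actually lives


def ensure_started_job_id(job_id: Optional[str], *, error_message: str) -> str:
    # A falsy job_id means the API never started the job; raise the
    # job-specific message instead of handing back an unusable id.
    if not job_id:
        raise HyperbrowserError(error_message)
    return job_id

Centralizing the check keeps the per-job error messages at the call sites while removing the three-line guard that was repeated in every start_and_wait method.
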
@@ -74,9 +74,10 @@ async def start_and_wait(
         max_status_failures: int = POLLING_ATTEMPTS,
     ) -> BatchScrapeJobResponse:
         job_start_resp = await self.start(params)
-        job_id = job_start_resp.job_id
-        if not job_id:
-            raise HyperbrowserError("Failed to start batch scrape job")
+        job_id = ensure_started_job_id(
+            job_start_resp.job_id,
+            error_message="Failed to start batch scrape job",
+        )
         operation_name = build_operation_name("batch scrape job ", job_id)
 
         job_status = await poll_until_terminal_status_async(
@@ -180,9 +181,10 @@ async def start_and_wait(
         max_status_failures: int = POLLING_ATTEMPTS,
     ) -> ScrapeJobResponse:
         job_start_resp = await self.start(params)
-        job_id = job_start_resp.job_id
-        if not job_id:
-            raise HyperbrowserError("Failed to start scrape job")
+        job_id = ensure_started_job_id(
+            job_start_resp.job_id,
+            error_message="Failed to start scrape job",
+        )
         operation_name = build_operation_name("scrape job ", job_id)
 
         return await wait_for_job_result_async(
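
Caller-visible behavior is unchanged: a start response with no job id still surfaces as a `HyperbrowserError`. A usage sketch, assuming the SDK exposes an `AsyncHyperbrowser` client with this manager mounted as `client.scrape` (the class name, the `api_key` argument, and the `hyperbrowser.models` import path are assumptions, not shown in this commit):

import asyncio

from hyperbrowser import AsyncHyperbrowser            # assumed client entry point
from hyperbrowser.exceptions import HyperbrowserError
from hyperbrowser.models import StartScrapeJobParams  # assumed import path


async def main() -> None:
    client = AsyncHyperbrowser(api_key="YOUR_API_KEY")
    try:
        # start_and_wait = start() + ensure_started_job_id() + polling to a
        # terminal status, then returning the fetched job result.
        job = await client.scrape.start_and_wait(
            StartScrapeJobParams(url="https://example.com")
        )
        print(job.status)
    except HyperbrowserError as exc:
        # Raised through ensure_started_job_id when no job_id comes back,
        # carrying the message "Failed to start scrape job".
        print(f"Scrape did not start: {exc}")


asyncio.run(main())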