Skip to content

Commit 632c844

Browse files
committed
Rename crawler_id to just id
1 parent 415299f commit 632c844

2 files changed

Lines changed: 8 additions & 8 deletions

File tree

src/crawlee/crawlers/_basic/_basic_crawler.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -298,7 +298,7 @@ def __init__(
298298
status_message_logging_interval: timedelta = timedelta(seconds=10),
299299
status_message_callback: Callable[[StatisticsState, StatisticsState | None, str], Awaitable[str | None]]
300300
| None = None,
301-
crawler_id: int | None = None,
301+
id: int | None = None,
302302
_context_pipeline: ContextPipeline[TCrawlingContext] | None = None,
303303
_additional_context_managers: Sequence[AbstractAsyncContextManager] | None = None,
304304
_logger: logging.Logger | None = None,
@@ -351,7 +351,7 @@ def __init__(
351351
status_message_logging_interval: Interval for logging the crawler status messages.
352352
status_message_callback: Allows overriding the default status message. The default status message is
353353
provided in the parameters. Returning `None` suppresses the status message.
354-
crawler_id: Id of the crawler used for state and statistics tracking. You can use the same explicit id to share
354+
id: Id of the crawler used for state and statistics tracking. You can use the same explicit id to share
355355
state and statistics between two crawlers. By default, each crawler will use its own state and statistics.
356356
_context_pipeline: Enables extending the request lifecycle and modifying the crawling context.
357357
Intended for use by subclasses rather than direct instantiation of `BasicCrawler`.
@@ -360,12 +360,12 @@ def __init__(
360360
_logger: A logger instance, typically provided by a subclass, for consistent logging labels.
361361
Intended for use by subclasses rather than direct instantiation of `BasicCrawler`.
362362
"""
363-
if crawler_id is None:
363+
if id is None:
364364
# This could look into a set of already used ids, but let's not overengineer this.
365365
self.id = BasicCrawler.__next_id
366366
BasicCrawler.__next_id += 1
367367
else:
368-
self.id = crawler_id
368+
self.id = id
369369

370370
implicit_event_manager_with_explicit_config = False
371371
if not configuration:

tests/unit/crawlers/_basic/test_basic_crawler.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -821,8 +821,8 @@ async def handler(context: BasicCrawlingContext) -> None:
821821
assert isinstance(state['urls'], list)
822822
state['urls'].append(context.request.url)
823823

824-
crawler_1 = BasicCrawler(crawler_id=0, request_handler=handler)
825-
crawler_2 = BasicCrawler(crawler_id=0, request_handler=handler)
824+
crawler_1 = BasicCrawler(id=0, request_handler=handler)
825+
crawler_2 = BasicCrawler(id=0, request_handler=handler)
826826

827827
await crawler_1.run(['https://a.com'])
828828
await crawler_2.run(['https://b.com'])
@@ -838,8 +838,8 @@ async def test_crawlers_share_stats() -> None:
838838
async def handler(context: BasicCrawlingContext) -> None:
839839
await context.use_state({'urls': []})
840840

841-
crawler_1 = BasicCrawler(crawler_id=0, request_handler=handler)
842-
crawler_2 = BasicCrawler(crawler_id=0, request_handler=handler, statistics=crawler_1.statistics)
841+
crawler_1 = BasicCrawler(id=0, request_handler=handler)
842+
crawler_2 = BasicCrawler(id=0, request_handler=handler, statistics=crawler_1.statistics)
843843

844844
result1 = await crawler_1.run(['https://a.com'])
845845
result2 = await crawler_2.run(['https://b.com'])

0 commit comments

Comments
 (0)