diff --git a/src/crawlee/crawlers/_playwright/_playwright_crawler.py b/src/crawlee/crawlers/_playwright/_playwright_crawler.py
index 314930902e..6f4b2b0e9d 100644
--- a/src/crawlee/crawlers/_playwright/_playwright_crawler.py
+++ b/src/crawlee/crawlers/_playwright/_playwright_crawler.py
@@ -12,7 +12,6 @@
 from pydantic import ValidationError
 from typing_extensions import NotRequired, TypedDict, TypeVar
 
-from crawlee import service_locator
 from crawlee._request import Request, RequestOptions, RequestState
 from crawlee._types import BasicCrawlingContext, ConcurrencySettings
 from crawlee._utils.blocked import RETRY_CSS_SELECTORS
@@ -145,10 +144,6 @@ def __init__(
                 not supported, use `navigation_timeout` instead.
             kwargs: Additional keyword arguments to pass to the underlying `BasicCrawler`.
         """
-        configuration = kwargs.pop('configuration', None)
-        if configuration is not None:
-            service_locator.set_configuration(configuration)
-
         self._shared_navigation_timeouts: dict[int, SharedTimeout] = {}
 
         if browser_pool:
diff --git a/tests/unit/crawlers/_playwright/test_playwright_crawler.py b/tests/unit/crawlers/_playwright/test_playwright_crawler.py
index 360e5f7f26..e0ada5de1c 100644
--- a/tests/unit/crawlers/_playwright/test_playwright_crawler.py
+++ b/tests/unit/crawlers/_playwright/test_playwright_crawler.py
@@ -791,12 +791,15 @@ async def request_handler(context: PlaywrightCrawlingContext) -> None:
     assert check_data['default'] != check_data['send_request']
 
 
-async def test_overwrite_configuration() -> None:
+async def test_passing_configuration() -> None:
     """Check that the configuration is allowed to be passed to the Playwrightcrawler."""
+    service_locator.set_configuration(Configuration(log_level='INFO'))
     configuration = Configuration(log_level='WARNING')
-    PlaywrightCrawler(configuration=configuration)
-    used_configuration = service_locator.get_configuration()
-    assert used_configuration is configuration
+
+    crawler = PlaywrightCrawler(configuration=configuration)
+
+    assert service_locator.get_configuration().log_level == 'INFO'
+    assert crawler._service_locator.get_configuration().log_level == 'WARNING'
 
 
 async def test_extract_links(server_url: URL) -> None:
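For reviewers, a minimal sketch of the behaviour the updated test asserts, under the assumption (taken from the test itself) that a `Configuration` passed to `PlaywrightCrawler` is now kept on the crawler's own service locator rather than written to the global `crawlee.service_locator`. The import paths are assumed to match the current crawlee layout, and `crawler._service_locator` is a private attribute used here only for verification, exactly as in the test.

```python
from crawlee import service_locator
from crawlee.configuration import Configuration
from crawlee.crawlers import PlaywrightCrawler

# The globally registered configuration is no longer touched by crawler construction.
service_locator.set_configuration(Configuration(log_level='INFO'))

# The configuration passed here is scoped to this crawler instance only.
crawler = PlaywrightCrawler(configuration=Configuration(log_level='WARNING'))

# Global configuration is unchanged; the crawler-local one holds the passed value.
assert service_locator.get_configuration().log_level == 'INFO'
assert crawler._service_locator.get_configuration().log_level == 'WARNING'  # private, for illustration only
```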