Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 0 additions & 5 deletions src/crawlee/crawlers/_playwright/_playwright_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
from pydantic import ValidationError
from typing_extensions import NotRequired, TypedDict, TypeVar

from crawlee import service_locator
from crawlee._request import Request, RequestOptions, RequestState
from crawlee._types import BasicCrawlingContext, ConcurrencySettings
from crawlee._utils.blocked import RETRY_CSS_SELECTORS
Expand Down Expand Up @@ -145,10 +144,6 @@ def __init__(
not supported, use `navigation_timeout` instead.
kwargs: Additional keyword arguments to pass to the underlying `BasicCrawler`.
"""
configuration = kwargs.pop('configuration', None)
if configuration is not None:
service_locator.set_configuration(configuration)

self._shared_navigation_timeouts: dict[int, SharedTimeout] = {}

if browser_pool:
Expand Down
11 changes: 7 additions & 4 deletions tests/unit/crawlers/_playwright/test_playwright_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -791,12 +791,15 @@ async def request_handler(context: PlaywrightCrawlingContext) -> None:
assert check_data['default'] != check_data['send_request']


async def test_overwrite_configuration() -> None:
async def test_passing_configuration() -> None:
"""Check that a configuration can be passed to the PlaywrightCrawler."""
service_locator.set_configuration(Configuration(log_level='INFO'))
configuration = Configuration(log_level='WARNING')
PlaywrightCrawler(configuration=configuration)
used_configuration = service_locator.get_configuration()
assert used_configuration is configuration

crawler = PlaywrightCrawler(configuration=configuration)

assert service_locator.get_configuration().log_level == 'INFO'
assert crawler._service_locator.get_configuration().log_level == 'WARNING'


async def test_extract_links(server_url: URL) -> None:
Expand Down
Loading