diff --git a/tests/unit/_utils/test_sitemap.py b/tests/unit/_utils/test_sitemap.py
index 3229641fda..807090eaa4 100644
--- a/tests/unit/_utils/test_sitemap.py
+++ b/tests/unit/_utils/test_sitemap.py
@@ -1,9 +1,7 @@
 import base64
 import gzip
-import sys
 from datetime import datetime
 
-import pytest
 from yarl import URL
 
 from crawlee._utils.sitemap import Sitemap, SitemapUrl, parse_sitemap
@@ -95,8 +93,6 @@ async def test_gzipped_sitemap(server_url: URL, http_client: HttpClient) -> None
     assert set(sitemap.urls) == BASIC_RESULTS
 
 
-# TODO: Remove this skip when #1460 is resolved.
-@pytest.mark.skipif(sys.platform != 'linux', reason='Flaky with Curl on Windows, see #1460.')
 async def test_gzipped_sitemap_with_invalid_data(server_url: URL, http_client: HttpClient) -> None:
     """Test loading a invalid gzipped sitemap with correct type and .xml.gz url."""
     compress_data = compress_gzip(BASIC_SITEMAP)
diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py
index 2c5d1df0fb..b75daa9e54 100644
--- a/tests/unit/conftest.py
+++ b/tests/unit/conftest.py
@@ -6,7 +6,7 @@
 import logging
 import os
 import warnings
-from typing import TYPE_CHECKING, cast
+from typing import TYPE_CHECKING, Any, cast
 
 import pytest
 from curl_cffi import CurlHttpVersion
@@ -205,9 +205,16 @@ def redirect_server_url(redirect_http_server: TestServer) -> URL:
         pytest.param('curl', id='curl'),
     ]
 )
-async def http_client(request: pytest.FixtureRequest) -> HttpClient:
+async def http_client(request: pytest.FixtureRequest) -> AsyncGenerator[HttpClient, None]:
+    class_client: type[HttpClient]
     if request.param == 'curl':
-        return CurlImpersonateHttpClient(http_version=CurlHttpVersion.V1_1)
-    if request.param == 'impit':
-        return ImpitHttpClient(http3=False)
-    return HttpxHttpClient(http2=False)
+        class_client = CurlImpersonateHttpClient
+        kwargs: dict[str, Any] = {'http_version': CurlHttpVersion.V1_1}
+    elif request.param == 'impit':
+        class_client = ImpitHttpClient
+        kwargs = {'http3': False}
+    else:
+        class_client = HttpxHttpClient
+        kwargs = {'http2': True}
+    async with class_client(**kwargs) as client:
+        yield client
diff --git a/tests/unit/http_clients/test_http_clients.py b/tests/unit/http_clients/test_http_clients.py
index 4fcdec1999..bbb13846ec 100644
--- a/tests/unit/http_clients/test_http_clients.py
+++ b/tests/unit/http_clients/test_http_clients.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import os
-import sys
 from typing import TYPE_CHECKING
 
 import pytest
@@ -164,10 +163,6 @@ async def test_send_request_allow_redirects_false(custom_http_client: HttpClient
 
 
 async def test_stream(http_client: HttpClient, server_url: URL) -> None:
-    # TODO: Remove this skip when #1494 is resolved.
-    if isinstance(http_client, CurlImpersonateHttpClient) and sys.platform != 'linux':
-        pytest.skip('Flaky with Curl on Windows, see #1494.')
-
     content_body: bytes = b''
 
     async with http_client.stream(str(server_url)) as response:
diff --git a/tests/unit/storages/test_request_queue.py b/tests/unit/storages/test_request_queue.py
index b5691052cd..25d9d9a294 100644
--- a/tests/unit/storages/test_request_queue.py
+++ b/tests/unit/storages/test_request_queue.py
@@ -449,8 +449,9 @@ async def test_add_requests_wait_for_all(
     # Immediately after adding, the total count may be less than 15 due to background processing
     assert await rq.get_total_count() <= 15
 
-    # Wait a 500 milliseconds for background tasks to complete
-    await asyncio.sleep(0.5)
+    # Wait for background tasks to complete
+    while await rq.get_total_count() < 15:  # noqa: ASYNC110
+        await asyncio.sleep(0.1)
 
     # Verify all requests were added
     assert await rq.get_total_count() == 15