4 changes: 0 additions & 4 deletions tests/unit/_utils/test_sitemap.py
@@ -1,9 +1,7 @@
 import base64
 import gzip
-import sys
 from datetime import datetime
 
-import pytest
 from yarl import URL
 
 from crawlee._utils.sitemap import Sitemap, SitemapUrl, parse_sitemap
@@ -95,8 +93,6 @@ async def test_gzipped_sitemap(server_url: URL, http_client: HttpClient) -> None
     assert set(sitemap.urls) == BASIC_RESULTS
 
 
-# TODO: Remove this skip when #1460 is resolved.
-@pytest.mark.skipif(sys.platform != 'linux', reason='Flaky with Curl on Windows, see #1460.')
 async def test_gzipped_sitemap_with_invalid_data(server_url: URL, http_client: HttpClient) -> None:
     """Test loading a invalid gzipped sitemap with correct type and .xml.gz url."""
     compress_data = compress_gzip(BASIC_SITEMAP)
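The payload above is produced by a `compress_gzip` helper that sits outside this diff. Purely for orientation, a minimal sketch of what such a helper presumably looks like (stdlib `gzip` only; the name and signature are taken from the call site, everything else is an assumption):

```python
import gzip


def compress_gzip(data: str) -> bytes:
    """Gzip-compress a text payload so the test server can serve it as .xml.gz."""
    return gzip.compress(data.encode('utf-8'))
```

The invalid-data variant of the test presumably corrupts or truncates these bytes before serving them under a gzip content type, which is what used to be flaky with Curl on Windows.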
19 changes: 13 additions & 6 deletions tests/unit/conftest.py
@@ -6,7 +6,7 @@
 import logging
 import os
 import warnings
-from typing import TYPE_CHECKING, cast
+from typing import TYPE_CHECKING, Any, cast
 
 import pytest
 from curl_cffi import CurlHttpVersion
@@ -205,9 +205,16 @@ def redirect_server_url(redirect_http_server: TestServer) -> URL:
         pytest.param('curl', id='curl'),
     ]
 )
-async def http_client(request: pytest.FixtureRequest) -> HttpClient:
+async def http_client(request: pytest.FixtureRequest) -> AsyncGenerator[HttpClient, None]:
+    class_client: type[HttpClient]
     if request.param == 'curl':
-        return CurlImpersonateHttpClient(http_version=CurlHttpVersion.V1_1)
-    if request.param == 'impit':
-        return ImpitHttpClient(http3=False)
-    return HttpxHttpClient(http2=False)
+        class_client = CurlImpersonateHttpClient
+        kwargs: dict[str, Any] = {'http_version': CurlHttpVersion.V1_1}
+    elif request.param == 'impit':
+        class_client = ImpitHttpClient
+        kwargs = {'http3': False}
+    else:
+        class_client = HttpxHttpClient
+        kwargs = {'http2': True}
+    async with class_client(**kwargs) as client:
+        yield client
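The fixture now yields the client from inside `async with` instead of returning a bare instance, so the client's asynchronous cleanup runs once each parametrized test finishes. A minimal, self-contained sketch of that pattern (the `FakeAsyncClient` class is invented for illustration, and an async-capable pytest plugin such as pytest-asyncio or anyio is assumed):

```python
from collections.abc import AsyncGenerator

import pytest


class FakeAsyncClient:
    """Hypothetical stand-in for an HTTP client that owns async resources."""

    async def __aenter__(self) -> 'FakeAsyncClient':
        return self  # e.g. open a connection pool here

    async def __aexit__(self, *exc_info: object) -> None:
        pass  # e.g. close sessions and connections here


@pytest.fixture
async def client() -> AsyncGenerator[FakeAsyncClient, None]:
    async with FakeAsyncClient() as c:
        yield c  # __aexit__ runs after the test that used the fixture completes
```

With a plain `return`, nothing ever awaited the client's shutdown, so connections could leak across parametrized runs; the generator form ties the client's lifetime to the fixture's.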
5 changes: 0 additions & 5 deletions tests/unit/http_clients/test_http_clients.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import os
-import sys
 from typing import TYPE_CHECKING
 
 import pytest
@@ -164,10 +163,6 @@ async def test_send_request_allow_redirects_false(custom_http_client: HttpClient


 async def test_stream(http_client: HttpClient, server_url: URL) -> None:
-    # TODO: Remove this skip when #1494 is resolved.
-    if isinstance(http_client, CurlImpersonateHttpClient) and sys.platform != 'linux':
-        pytest.skip('Flaky with Curl on Windows, see #1494.')
-
     content_body: bytes = b''
 
     async with http_client.stream(str(server_url)) as response:
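Both workarounds dropped in this PR are standard pytest skips: test_sitemap.py used a `skipif` decorator on the test function, while `test_stream` bailed out at runtime. For reference, a minimal sketch of the two styles being removed (stock pytest only; the test names here are made up):

```python
import sys

import pytest


# Marker-level skip: the body never runs; the test is reported as skipped before setup.
@pytest.mark.skipif(sys.platform != 'linux', reason='Flaky on non-Linux platforms.')
def test_skipped_by_marker() -> None:
    ...


def test_skipped_at_runtime() -> None:
    # Runtime skip: anything above this line still executes before the test is skipped.
    if sys.platform != 'linux':
        pytest.skip('Flaky on non-Linux platforms.')
```

The runtime form is handy when the condition depends on a fixture value (here, which `http_client` implementation was injected), which a decorator cannot see.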
5 changes: 3 additions & 2 deletions tests/unit/storages/test_request_queue.py
@@ -449,8 +449,9 @@ async def test_add_requests_wait_for_all(
     # Immediately after adding, the total count may be less than 15 due to background processing
     assert await rq.get_total_count() <= 15
 
-    # Wait a 500 milliseconds for background tasks to complete
-    await asyncio.sleep(0.5)
+    # Wait for background tasks to complete
+    while await rq.get_total_count() < 15:  # noqa: ASYNC110
+        await asyncio.sleep(0.1)
 
     # Verify all requests were added
     assert await rq.get_total_count() == 15
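The replacement loop polls until the queue reports all 15 requests, instead of a fixed `sleep(0.5)` that could be either too short or needlessly long; it relies on the test runner's timeout (if any) to catch a hang. Where no such timeout exists, a bounded polling helper is a common alternative. A sketch under that assumption (the helper name, timeout, and interval are invented):

```python
import asyncio
import time
from collections.abc import Awaitable, Callable


async def wait_until(
    condition: Callable[[], Awaitable[bool]], *, timeout: float = 5.0, interval: float = 0.1
) -> None:
    """Poll an async condition until it returns True or the timeout expires."""
    deadline = time.monotonic() + timeout
    while not await condition():
        if time.monotonic() > deadline:
            raise TimeoutError('Condition not met within the allotted time.')
        await asyncio.sleep(interval)


# Hypothetical usage in the test above:
#
#     async def all_added() -> bool:
#         return await rq.get_total_count() >= 15
#
#     await wait_until(all_added)
```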