Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions tavily/async_tavily.py
Original file line number Diff line number Diff line change
Expand Up @@ -190,6 +190,8 @@ async def _extract(
timeout: float = 30,
include_favicon: bool = None,
include_usage: bool = None,
query: str = None,
chunks_per_source: int = None,
**kwargs
) -> dict:
"""
Expand All @@ -204,6 +206,8 @@ async def _extract(
"timeout": timeout,
"include_favicon": include_favicon,
"include_usage": include_usage,
"query": query,
"chunks_per_source": chunks_per_source,
}

data = {k: v for k, v in data.items() if v is not None}
Expand Down Expand Up @@ -246,6 +250,8 @@ async def extract(self,
timeout: float = 30,
include_favicon: bool = None,
include_usage: bool = None,
query: str = None,
chunks_per_source: int = None,
**kwargs, # Accept custom arguments
) -> dict:
"""
Expand All @@ -259,6 +265,8 @@ async def extract(self,
timeout,
include_favicon=include_favicon,
include_usage=include_usage,
query=query,
chunks_per_source=chunks_per_source,
**kwargs,
)

Expand Down Expand Up @@ -287,6 +295,7 @@ async def _crawl(self,
timeout: float = 150,
include_favicon: bool = None,
include_usage: bool = None,
chunks_per_source: int = None,
**kwargs
) -> dict:
"""
Expand All @@ -309,6 +318,7 @@ async def _crawl(self,
"timeout": timeout,
"include_favicon": include_favicon,
"include_usage": include_usage,
"chunks_per_source": chunks_per_source,
}

if kwargs:
Expand Down Expand Up @@ -359,6 +369,7 @@ async def crawl(self,
timeout: float = 150,
include_favicon: bool = None,
include_usage: bool = None,
chunks_per_source: int = None,
**kwargs
) -> dict:
"""
Expand All @@ -381,6 +392,7 @@ async def crawl(self,
timeout=timeout,
include_favicon=include_favicon,
include_usage=include_usage,
chunks_per_source=chunks_per_source,
**kwargs)

return response_dict
Expand Down
12 changes: 12 additions & 0 deletions tavily/tavily.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,6 +174,8 @@ def _extract(self,
timeout: float = 30,
include_favicon: bool = None,
include_usage: bool = None,
query: str = None,
chunks_per_source: int = None,
**kwargs
) -> dict:
"""
Expand All @@ -187,6 +189,8 @@ def _extract(self,
"timeout": timeout,
"include_favicon": include_favicon,
"include_usage": include_usage,
"query": query,
"chunks_per_source": chunks_per_source,
}

data = {k: v for k, v in data.items() if v is not None}
Expand Down Expand Up @@ -227,6 +231,8 @@ def extract(self,
timeout: float = 30,
include_favicon: bool = None,
include_usage: bool = None,
query: str = None,
chunks_per_source: int = None,
**kwargs, # Accept custom arguments
) -> dict:
"""
Expand All @@ -239,6 +245,8 @@ def extract(self,
timeout,
include_favicon=include_favicon,
include_usage=include_usage,
query=query,
chunks_per_source=chunks_per_source,
**kwargs)

tavily_results = response_dict.get("results", [])
Expand Down Expand Up @@ -266,6 +274,7 @@ def _crawl(self,
timeout: float = 150,
include_favicon: bool = None,
include_usage: bool = None,
chunks_per_source: int = None,
Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Bug: missing `query` parameter in the crawl methods

The PR description states that both the `query` and `chunks_per_source` parameters should be added to the extract and crawl methods. However, while `chunks_per_source` was added to the crawl methods, the `query` parameter was added only to the extract methods and is entirely missing from `_crawl` and `crawl` in both `tavily.py` and `async_tavily.py`. Because of this incomplete implementation, users cannot use intent-based content extraction with the crawl API.

Additional Locations (1)

Fix in Cursor Fix in Web

**kwargs
) -> dict:
"""
Expand All @@ -289,6 +298,7 @@ def _crawl(self,
"timeout": timeout,
"include_favicon": include_favicon,
"include_usage": include_usage,
"chunks_per_source": chunks_per_source,
}

if kwargs:
Expand Down Expand Up @@ -339,6 +349,7 @@ def crawl(self,
timeout: float = 150,
include_favicon: bool = None,
include_usage: bool = None,
chunks_per_source: int = None,
**kwargs
) -> dict:
"""
Expand All @@ -361,6 +372,7 @@ def crawl(self,
timeout=timeout,
include_favicon=include_favicon,
include_usage=include_usage,
chunks_per_source=chunks_per_source,
**kwargs)

return response_dict
Expand Down