20 changes: 10 additions & 10 deletions libs/astradb/langchain_astradb/vectorstores.py
@@ -1233,7 +1233,7 @@ def similarity_search(
self,
query: str,
k: int = 4,
-filter: dict[str, Any] | None = None, # noqa: A002
+filter: dict[str, Any] | None = None,
Collaborator Author commented:

It is triggered with Ruff 0.5.0 but no longer with 0.5.7.
See astral-sh/ruff#12415 (comment)

Collaborator commented:

TIL

**kwargs: Any,
) -> list[Document]:
"""Return docs most similar to query.
@@ -1268,7 +1268,7 @@ async def asimilarity_search(
self,
query: str,
k: int = 4,
-filter: dict[str, Any] | None = None, # noqa: A002
+filter: dict[str, Any] | None = None,
**kwargs: Any,
) -> list[Document]:
"""Return docs most similar to query.
@@ -1307,7 +1307,7 @@ def similarity_search_by_vector(
self,
embedding: list[float],
k: int = 4,
-filter: dict[str, Any] | None = None, # noqa: A002
+filter: dict[str, Any] | None = None,
**kwargs: Any,
) -> list[Document]:
"""Return docs most similar to embedding vector.
@@ -1334,7 +1334,7 @@ async def asimilarity_search_by_vector(
self,
embedding: list[float],
k: int = 4,
-filter: dict[str, Any] | None = None, # noqa: A002
+filter: dict[str, Any] | None = None,
**kwargs: Any,
) -> list[Document]:
"""Return docs most similar to embedding vector.
@@ -1361,7 +1361,7 @@ def similarity_search_with_score(
self,
query: str,
k: int = 4,
-filter: dict[str, Any] | None = None, # noqa: A002
+filter: dict[str, Any] | None = None,
) -> list[tuple[Document, float]]:
"""Return docs most similar to query with score.

@@ -1399,7 +1399,7 @@ async def asimilarity_search_with_score(
self,
query: str,
k: int = 4,
-filter: dict[str, Any] | None = None, # noqa: A002
+filter: dict[str, Any] | None = None,
) -> list[tuple[Document, float]]:
"""Return docs most similar to query with score.

@@ -1530,7 +1530,7 @@ def max_marginal_relevance_search_by_vector(
k: int = 4,
fetch_k: int = 20,
lambda_mult: float = 0.5,
-filter: dict[str, Any] | None = None, # noqa: A002
+filter: dict[str, Any] | None = None,
**kwargs: Any,
) -> list[Document]:
"""Return docs selected using the maximal marginal relevance.
@@ -1568,7 +1568,7 @@ async def amax_marginal_relevance_search_by_vector(
k: int = 4,
fetch_k: int = 20,
lambda_mult: float = 0.5,
-filter: dict[str, Any] | None = None, # noqa: A002
+filter: dict[str, Any] | None = None,
**kwargs: Any,
) -> list[Document]:
"""Return docs selected using the maximal marginal relevance.
@@ -1606,7 +1606,7 @@ def max_marginal_relevance_search(
k: int = 4,
fetch_k: int = 20,
lambda_mult: float = 0.5,
-filter: dict[str, Any] | None = None, # noqa: A002
+filter: dict[str, Any] | None = None,
**kwargs: Any,
) -> list[Document]:
"""Return docs selected using the maximal marginal relevance.
@@ -1655,7 +1655,7 @@ async def amax_marginal_relevance_search(
k: int = 4,
fetch_k: int = 20,
lambda_mult: float = 0.5,
-filter: dict[str, Any] | None = None, # noqa: A002
+filter: dict[str, Any] | None = None,
**kwargs: Any,
) -> list[Document]:
"""Return docs selected using the maximal marginal relevance.
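A note on the suppressions removed in this file: Ruff's A002 rule (builtin-argument-shadowing, from the flake8-builtins set) flags function parameters that reuse a builtin name such as `filter`. Per the comment thread above, Ruff 0.5.0 reported it on these signatures but 0.5.7 no longer does, so the `# noqa: A002` markers are unnecessary; with `select = ["ALL"]` enabled below, a stale suppression would itself be reported by RUF100 (unused-noqa). A minimal sketch of the pattern A002 targets, using a hypothetical function that is not part of the library:

```python
from __future__ import annotations

from typing import Any


def search(query: str, filter: dict[str, Any] | None = None) -> list[str]:
    # On Ruff releases where A002 fires, the `filter` parameter above is
    # flagged because it shadows the built-in filter() inside this function.
    if filter is None:
        return [query]
    return [query, repr(filter)]
```
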
44 changes: 40 additions & 4 deletions libs/astradb/pyproject.toml
@@ -63,10 +63,46 @@ optional = true
langchain-core = { git = "https://github.com/langchain-ai/langchain.git", subdirectory = "libs/core" }

[tool.ruff.lint]
-select = [
-"E", # pycodestyle
-"F", # pyflakes
-"I", # isort
+pydocstyle.convention = "google"
+pep8-naming.classmethod-decorators = [
+"langchain_core.pydantic_v1.validator",
]
+select = ["ALL"]
+ignore = [
+"ANN", # Already checked by mypy
+"C90", # Do we want to activate (complexity) ?
+"COM812", # Messes with the formatter
+"D100", # Do we want to activate (docstring in module) ?
+"D104", # Do we want to activate (docstring in package) ?
+"D105", # Do we want to activate (docstring in magic method) ?
+"D107", # Do we want to activate (docstring in __init__) ?
+"EM", # Do we want to activate (error messages) ?
+"ERA", # Do we want to activate (no commented code) ?
+"FBT", # Do we want to activate (boolean trap) ?
+"ISC001", # Messes with the formatter
+"PERF203", # Incorrect detection
+"PLR09", # TODO: do we enforce these ones (complexity) ?
+"PTH", # Do we want to activate (use pathlib) ?
+"TRY003", # A bit too strict ?
+
+"BLE001", # TODO
+"PT011", # TODO
+"PT012", # TODO
+"D101", # TODO
+"D417", # TODO
+
+]
+
+[tool.ruff.lint.per-file-ignores]
+"tests/*" = [
+"D",
+"S101",
+"SLF001",
+"T201",
+"PLR2004",
+]
+"scripts/*" = [
+"T201",
+]

[tool.mypy]
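The lint configuration above moves from a three-rule allowlist to `select = ["ALL"]` plus an explicit ignore list, with per-file relaxations for tests and scripts. The `pep8-naming.classmethod-decorators` entry tells Ruff to treat Pydantic v1 `@validator` methods as classmethods, so their `cls` first argument is not flagged. As a rough illustration of what the per-file ignores permit (assuming the usual rule meanings: S101 = assert, SLF001 = private-member access, PLR2004 = magic-value comparison, T201 = print, D = docstrings), here is a hypothetical test, not taken from the repository:

```python
from __future__ import annotations


class FakeStore:
    """Hypothetical stand-in for a vector store, used only for illustration."""

    def __init__(self) -> None:
        self._items: list[str] = []

    def _insert_many(self, items: list[str]) -> None:
        self._items.extend(items)

    def count(self) -> int:
        return len(self._items)


def test_collection_count() -> None:
    store = FakeStore()
    store._insert_many(["a", "b", "c"])  # SLF001: private-member access, ignored under tests/*
    assert store.count() == 3  # S101 (assert) and PLR2004 (magic value), ignored under tests/*
    print("count ok")  # T201 (print), ignored under tests/* and scripts/*
```

Outside those globs, the same patterns would be reported once `ruff check` runs with this configuration.
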
4 changes: 2 additions & 2 deletions libs/astradb/tests/integration_tests/test_caches.py
@@ -65,8 +65,8 @@ async def aembed_query(self, text: str) -> list[float]:
class FakeLLM(LLM):
"""Fake LLM wrapper for testing purposes."""

-queries: Optional[Mapping] = None
-sequential_responses: Optional[bool] = False
+queries: Optional[Mapping] = None # noqa: UP007
+sequential_responses: Optional[bool] = False # noqa: UP007
response_index: int = 0

@validator("queries", always=True)
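The new `# noqa: UP007` markers above suppress Ruff's non-pep604-annotation rule, which would otherwise rewrite `Optional[X]` as `X | None`. The older spelling is presumably kept deliberately: `FakeLLM` subclasses `LLM`, a Pydantic v1 model (note the `@validator` just above), and Pydantic v1 evaluates field annotations at runtime, where the `|` union form is unavailable before Python 3.10. A minimal sketch of the pattern, with a hypothetical class standing in for the real test model:

```python
from typing import Mapping, Optional


class FakeLLMConfig:
    """Hypothetical stand-in for a Pydantic v1 model; illustration only."""

    # UP007 would normally rewrite these to `Mapping | None` / `bool | None`.
    # The suppression keeps Optional[...] because a base class that evaluates
    # annotations at runtime (Pydantic v1 in the real test) cannot use the
    # `|` union syntax on Python versions before 3.10.
    queries: Optional[Mapping] = None  # noqa: UP007
    sequential_responses: Optional[bool] = False  # noqa: UP007
```
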
@@ -283,7 +283,6 @@ def test_astradb_loader_findoptions_deprecation(
self,
astra_db_credentials: AstraDBCredentials,
collection: Collection,
-core_astra_db: AstraDB,
) -> None:
"""Test deprecation of 'find_options' and related warnings/errors."""
loader0 = AstraDBLoader(