Skip to content

Commit 921573e

Browse files
cbornet and eyurtsev authored
core: Add ruff rules SLF (#30666)
Add ruff rules SLF: https://docs.astral.sh/ruff/rules/#flake8-self-slf --------- Co-authored-by: Eugene Yurtsev <eyurtsev@gmail.com>
1 parent d8a7eda commit 921573e

File tree

8 files changed

+1234
-1235
lines changed

8 files changed

+1234
-1235
lines changed

libs/core/langchain_core/language_models/chat_models.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -412,8 +412,8 @@ def _should_stream(
412412
**kwargs: Any,
413413
) -> bool:
414414
"""Determine if a given model call should hit the streaming API."""
415-
sync_not_implemented = type(self)._stream == BaseChatModel._stream
416-
async_not_implemented = type(self)._astream == BaseChatModel._astream
415+
sync_not_implemented = type(self)._stream == BaseChatModel._stream # noqa: SLF001
416+
async_not_implemented = type(self)._astream == BaseChatModel._astream # noqa: SLF001
417417

418418
# Check if streaming is implemented.
419419
if (not async_api) and sync_not_implemented:

libs/core/langchain_core/language_models/llms.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -522,7 +522,7 @@ def stream(
522522
stop: Optional[list[str]] = None,
523523
**kwargs: Any,
524524
) -> Iterator[str]:
525-
if type(self)._stream == BaseLLM._stream:
525+
if type(self)._stream == BaseLLM._stream: # noqa: SLF001
526526
# model doesn't implement streaming, so use default implementation
527527
yield self.invoke(input, config=config, stop=stop, **kwargs)
528528
else:
@@ -590,8 +590,8 @@ async def astream(
590590
**kwargs: Any,
591591
) -> AsyncIterator[str]:
592592
if (
593-
type(self)._astream is BaseLLM._astream
594-
and type(self)._stream is BaseLLM._stream
593+
type(self)._astream is BaseLLM._astream # noqa: SLF001
594+
and type(self)._stream is BaseLLM._stream # noqa: SLF001
595595
):
596596
yield await self.ainvoke(input, config=config, stop=stop, **kwargs)
597597
return

libs/core/langchain_core/runnables/configurable.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -131,7 +131,7 @@ def prepare(
131131
"""
132132
runnable: Runnable[Input, Output] = self
133133
while isinstance(runnable, DynamicRunnable):
134-
runnable, config = runnable._prepare(merge_configs(runnable.config, config))
134+
runnable, config = runnable._prepare(merge_configs(runnable.config, config)) # noqa: SLF001
135135
return runnable, cast("RunnableConfig", config)
136136

137137
@abstractmethod

libs/core/langchain_core/tools/base.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -845,7 +845,7 @@ async def arun(
845845
child_config = patch_config(config, callbacks=run_manager.get_child())
846846
with set_config_context(child_config) as context:
847847
func_to_check = (
848-
self._run if self.__class__._arun is BaseTool._arun else self._arun
848+
self._run if self.__class__._arun is BaseTool._arun else self._arun # noqa: SLF001
849849
)
850850
if signature(func_to_check).parameters.get("run_manager"):
851851
tool_kwargs["run_manager"] = run_manager

libs/core/langchain_core/tracers/langchain.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -50,8 +50,8 @@ def log_error_once(method: str, exception: Exception) -> None:
5050

5151
def wait_for_all_tracers() -> None:
5252
"""Wait for all tracers to finish."""
53-
if rt._CLIENT is not None:
54-
rt._CLIENT.flush()
53+
if rt._CLIENT is not None: # noqa: SLF001
54+
rt._CLIENT.flush() # noqa: SLF001
5555

5656

5757
def get_client() -> Client:
@@ -123,8 +123,8 @@ def _start_trace(self, run: Run) -> None:
123123
run.tags = self.tags.copy()
124124

125125
super()._start_trace(run)
126-
if run._client is None:
127-
run._client = self.client # type: ignore[misc]
126+
if run.ls_client is None:
127+
run.ls_client = self.client
128128

129129
def on_chat_model_start(
130130
self,

libs/core/langchain_core/utils/mustache.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -379,7 +379,7 @@ def _get_key(
379379
try:
380380
# This allows for custom falsy data types
381381
# https://github.com/noahmorrison/chevron/issues/35
382-
if resolved_scope._CHEVRON_return_scope_when_falsy: # type: ignore[union-attr]
382+
if resolved_scope._CHEVRON_return_scope_when_falsy: # type: ignore[union-attr] # noqa: SLF001
383383
return resolved_scope
384384
except AttributeError:
385385
if resolved_scope in (0, False):

libs/core/pyproject.toml

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ authors = []
77
license = {text = "MIT"}
88
requires-python = ">=3.9"
99
dependencies = [
10-
"langsmith<0.4,>=0.1.125",
10+
"langsmith<0.4,>=0.1.126",
1111
"tenacity!=8.4.0,<10.0.0,>=8.1.0",
1212
"jsonpatch<2.0,>=1.33",
1313
"PyYAML>=5.3",
@@ -105,7 +105,6 @@ ignore = [
105105
"ERA",
106106
"PLR2004",
107107
"RUF",
108-
"SLF",
109108
]
110109
flake8-type-checking.runtime-evaluated-base-classes = ["pydantic.BaseModel","langchain_core.load.serializable.Serializable","langchain_core.runnables.base.RunnableSerializable"]
111110
flake8-annotations.allow-star-arg-any = true
@@ -132,5 +131,5 @@ classmethod-decorators = [ "classmethod", "langchain_core.utils.pydantic.pre_ini
132131
"tests/unit_tests/runnables/test_runnable.py" = [ "E501",]
133132
"tests/unit_tests/runnables/test_graph.py" = [ "E501",]
134133
"tests/unit_tests/test_tools.py" = [ "ARG",]
135-
"tests/**" = [ "D", "S",]
134+
"tests/**" = [ "D", "S", "SLF",]
136135
"scripts/**" = [ "INP", "S",]

0 commit comments

Comments (0)