Skip to content

Commit 6432647

Browse files
committed
fix(core): _should_stream respects streaming param
1 parent c6c7fce commit 6432647

File tree

3 files changed

+11
-7
lines changed

3 files changed

+11
-7
lines changed

libs/core/langchain_core/language_models/chat_models.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -451,6 +451,10 @@ def _should_stream(
451451
if self.disable_streaming == "tool_calling" and kwargs.get("tools"):
452452
return False
453453

454+
# Check if streaming has been disabled via the streaming parameter.
455+
if hasattr(self, "streaming") and not self.streaming:
456+
return False
457+
454458
# Check if a runtime streaming flag has been passed in.
455459
if "stream" in kwargs:
456460
return kwargs["stream"]

libs/partners/openai/tests/unit_tests/chat_models/test_base.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -350,7 +350,7 @@ def mock_glm4_completion() -> list:
350350

351351
async def test_glm4_astream(mock_glm4_completion: list) -> None:
352352
llm_name = "glm-4"
353-
llm = ChatOpenAI(model=llm_name, stream_usage=True)
353+
llm = ChatOpenAI(model=llm_name, stream_usage=True, streaming=True)
354354
mock_client = AsyncMock()
355355

356356
async def mock_create(*args: Any, **kwargs: Any) -> MockAsyncContextManager:
@@ -375,7 +375,7 @@ async def mock_create(*args: Any, **kwargs: Any) -> MockAsyncContextManager:
375375

376376
def test_glm4_stream(mock_glm4_completion: list) -> None:
377377
llm_name = "glm-4"
378-
llm = ChatOpenAI(model=llm_name, stream_usage=True)
378+
llm = ChatOpenAI(model=llm_name, stream_usage=True, streaming=True)
379379
mock_client = MagicMock()
380380

381381
def mock_create(*args: Any, **kwargs: Any) -> MockSyncContextManager:
@@ -431,7 +431,7 @@ def mock_deepseek_completion() -> list[dict]:
431431

432432
async def test_deepseek_astream(mock_deepseek_completion: list) -> None:
433433
llm_name = "deepseek-chat"
434-
llm = ChatOpenAI(model=llm_name, stream_usage=True)
434+
llm = ChatOpenAI(model=llm_name, stream_usage=True, streaming=True)
435435
mock_client = AsyncMock()
436436

437437
async def mock_create(*args: Any, **kwargs: Any) -> MockAsyncContextManager:
@@ -455,7 +455,7 @@ async def mock_create(*args: Any, **kwargs: Any) -> MockAsyncContextManager:
455455

456456
def test_deepseek_stream(mock_deepseek_completion: list) -> None:
457457
llm_name = "deepseek-chat"
458-
llm = ChatOpenAI(model=llm_name, stream_usage=True)
458+
llm = ChatOpenAI(model=llm_name, stream_usage=True, streaming=True)
459459
mock_client = MagicMock()
460460

461461
def mock_create(*args: Any, **kwargs: Any) -> MockSyncContextManager:
@@ -499,7 +499,7 @@ def mock_openai_completion() -> list[dict]:
499499

500500
async def test_openai_astream(mock_openai_completion: list) -> None:
501501
llm_name = "gpt-4o"
502-
llm = ChatOpenAI(model=llm_name, stream_usage=True)
502+
llm = ChatOpenAI(model=llm_name, stream_usage=True, streaming=True)
503503
mock_client = AsyncMock()
504504

505505
async def mock_create(*args: Any, **kwargs: Any) -> MockAsyncContextManager:
@@ -523,7 +523,7 @@ async def mock_create(*args: Any, **kwargs: Any) -> MockAsyncContextManager:
523523

524524
def test_openai_stream(mock_openai_completion: list) -> None:
525525
llm_name = "gpt-4o"
526-
llm = ChatOpenAI(model=llm_name, stream_usage=True)
526+
llm = ChatOpenAI(model=llm_name, stream_usage=True, streaming=True)
527527
mock_client = MagicMock()
528528

529529
def mock_create(*args: Any, **kwargs: Any) -> MockSyncContextManager:

libs/partners/openai/tests/unit_tests/chat_models/test_responses_stream.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -621,7 +621,7 @@ def _strip_none(obj: Any) -> Any:
621621

622622

623623
def test_responses_stream() -> None:
624-
llm = ChatOpenAI(model="o4-mini", output_version="responses/v1")
624+
llm = ChatOpenAI(model="o4-mini", output_version="responses/v1", streaming=True)
625625
mock_client = MagicMock()
626626

627627
def mock_create(*args: Any, **kwargs: Any) -> MockSyncContextManager:

0 commit comments

Comments (0)