From 6ab5eacfa61db07df1e0067c35898e22a1606ecf Mon Sep 17 00:00:00 2001 From: Alex Date: Mon, 9 Feb 2026 14:08:54 +0000 Subject: [PATCH] tests: include_thoughts alone no longer sets thinking_config in _raw_gen_stream --- tests/llm/test_google_llm.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/llm/test_google_llm.py b/tests/llm/test_google_llm.py index 8addfaa5..92bf1cbd 100644 --- a/tests/llm/test_google_llm.py +++ b/tests/llm/test_google_llm.py @@ -141,7 +141,9 @@ def test_raw_gen_stream_does_not_set_thinking_config_by_default(monkeypatch): assert captured["config"].thinking_config is None -def test_raw_gen_stream_sets_thinking_config_when_explicitly_requested(monkeypatch): +def test_raw_gen_stream_accepts_include_thoughts_without_setting_thinking_config( + monkeypatch, +): captured = {} def fake_stream(self, *args, **kwargs): @@ -152,7 +154,7 @@ def test_raw_gen_stream_sets_thinking_config_when_explicitly_requested(monkeypat llm = GoogleLLM(api_key="key") msgs = [{"role": "user", "content": "hello"}] - list( + out = list( llm._raw_gen_stream( llm, model="gemini", @@ -162,8 +164,8 @@ ) ) - assert captured["config"].thinking_config is not None - assert captured["config"].thinking_config.include_thoughts is True + assert out == ["a"] + assert captured["config"].thinking_config is None def test_raw_gen_stream_emits_thought_events(monkeypatch):