diff --git a/tests/llm/test_google_llm.py b/tests/llm/test_google_llm.py
index 92bf1cbd..8addfaa5 100644
--- a/tests/llm/test_google_llm.py
+++ b/tests/llm/test_google_llm.py
@@ -141,9 +141,7 @@ def test_raw_gen_stream_does_not_set_thinking_config_by_default(monkeypatch):
     assert captured["config"].thinking_config is None
 
 
-def test_raw_gen_stream_accepts_include_thoughts_without_setting_thinking_config(
-    monkeypatch,
-):
+def test_raw_gen_stream_sets_thinking_config_when_explicitly_requested(monkeypatch):
     captured = {}
 
     def fake_stream(self, *args, **kwargs):
@@ -154,7 +152,7 @@ def test_raw_gen_stream_accepts_include_thoughts_without_setting_thinking_config
     llm = GoogleLLM(api_key="key")
     msgs = [{"role": "user", "content": "hello"}]
 
-    out = list(
+    list(
         llm._raw_gen_stream(
             llm,
             model="gemini",
@@ -164,8 +162,8 @@ def test_raw_gen_stream_accepts_include_thoughts_without_setting_thinking_config
         )
     )
 
-    assert out == ["a"]
-    assert captured["config"].thinking_config is None
+    assert captured["config"].thinking_config is not None
+    assert captured["config"].thinking_config.include_thoughts is True
 
 
 def test_raw_gen_stream_emits_thought_events(monkeypatch):