From cf8cc856d7d02165bd08593b4757e1256a62d501 Mon Sep 17 00:00:00 2001
From: peidaqi
Date: Tue, 28 Jan 2025 16:03:42 -0700
Subject: [PATCH] server : Fixed wrong function name in llamacpp server unit test (#11473)

The test_completion_stream_with_openai_library() function actually runs with
stream=False (the default), and test_completion_with_openai_library() runs
with stream=True, so the two names were swapped.
---
 examples/server/tests/unit/test_completion.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/server/tests/unit/test_completion.py b/examples/server/tests/unit/test_completion.py
index c1fc12462..0ed5b99be 100644
--- a/examples/server/tests/unit/test_completion.py
+++ b/examples/server/tests/unit/test_completion.py
@@ -87,7 +87,7 @@ def test_completion_stream_vs_non_stream():
     assert content_stream == res_non_stream.body["content"]
 
 
-def test_completion_stream_with_openai_library():
+def test_completion_with_openai_library():
     global server
     server.start()
     client = OpenAI(api_key="dummy", base_url=f"http://{server.server_host}:{server.server_port}/v1")
@@ -102,7 +102,7 @@ def test_completion_stream_with_openai_library():
     assert match_regex("(going|bed)+", res.choices[0].text)
 
 
-def test_completion_with_openai_library():
+def test_completion_stream_with_openai_library():
     global server
     server.start()
     client = OpenAI(api_key="dummy", base_url=f"http://{server.server_host}:{server.server_port}/v1")
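
For reference, the distinction the swapped names are meant to capture comes down to the stream argument of client.completions.create() in the openai Python library: without it (the default, stream=False) the call returns one response object containing the full text, while stream=True returns an iterator of chunks. The sketch below is a minimal illustration of that behaviour, not part of the patch; the base_url assumes a llama.cpp server with an OpenAI-compatible /v1 endpoint on localhost:8080, and the model name and prompt simply mirror the test and are placeholders.

    # Minimal sketch (assumption: a llama.cpp server is running on localhost:8080
    # with its OpenAI-compatible /v1 API; model name and prompt are placeholders).
    from openai import OpenAI

    client = OpenAI(api_key="dummy", base_url="http://localhost:8080/v1")

    # Non-streaming: stream defaults to False, one response with the full text.
    res = client.completions.create(
        model="davinci-002",
        prompt="I believe the meaning of life is",
        max_tokens=8,
    )
    print(res.choices[0].text)

    # Streaming: stream=True yields chunks; concatenate their text pieces.
    stream = client.completions.create(
        model="davinci-002",
        prompt="I believe the meaning of life is",
        max_tokens=8,
        stream=True,
    )
    text = "".join(chunk.choices[0].text for chunk in stream)
    print(text)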