server : Fixed wrong function name in llamacpp server unit test (#11473)
The test_completion_stream_with_openai_library() function actually ran with stream=False by default, and test_completion_with_openai_library() ran with stream=True; this commit swaps the two names so each test matches its behavior.
This commit is contained in:
parent
d0c08040b6
commit
cf8cc856d7
1 changed file with 2 additions and 2 deletions
|
@ -87,7 +87,7 @@ def test_completion_stream_vs_non_stream():
|
||||||
assert content_stream == res_non_stream.body["content"]
|
assert content_stream == res_non_stream.body["content"]
|
||||||
|
|
||||||
|
|
||||||
def test_completion_stream_with_openai_library():
|
def test_completion_with_openai_library():
|
||||||
global server
|
global server
|
||||||
server.start()
|
server.start()
|
||||||
client = OpenAI(api_key="dummy", base_url=f"http://{server.server_host}:{server.server_port}/v1")
|
client = OpenAI(api_key="dummy", base_url=f"http://{server.server_host}:{server.server_port}/v1")
|
||||||
|
@ -102,7 +102,7 @@ def test_completion_stream_with_openai_library():
|
||||||
assert match_regex("(going|bed)+", res.choices[0].text)
|
assert match_regex("(going|bed)+", res.choices[0].text)
|
||||||
|
|
||||||
|
|
||||||
def test_completion_with_openai_library():
|
def test_completion_stream_with_openai_library():
|
||||||
global server
|
global server
|
||||||
server.start()
|
server.start()
|
||||||
client = OpenAI(api_key="dummy", base_url=f"http://{server.server_host}:{server.server_port}/v1")
|
client = OpenAI(api_key="dummy", base_url=f"http://{server.server_host}:{server.server_port}/v1")
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue