Add docs
Make fschat and flask-cors optional
This commit is contained in:
parent
ea5a7fbc95
commit
bee2a3d981
2 changed files with 21 additions and 6 deletions
|
@ -195,6 +195,14 @@ bash chat.sh
|
|||
API example using Python Flask: [api_like_OAI.py](api_like_OAI.py)
|
||||
This example must be used together with `server.cpp`.
|
||||
|
||||
Requirements:
|
||||
|
||||
```shell
|
||||
pip install flask flask-cors fschat # flask-cors and fschat are optional: flask-cors enables cross-origin requests, fschat provides chat-template integration
|
||||
```
|
||||
|
||||
Run the server:
|
||||
|
||||
```sh
|
||||
python api_like_OAI.py
|
||||
```
|
||||
|
@ -204,6 +212,8 @@ After running the API server, you can use it in Python by setting the API base U
|
|||
openai.api_base = "http://<Your api-server IP>:port"
|
||||
```
|
||||
|
||||
For better integration with the model, it is recommended to utilize the `--chat-prompt-model` parameter when starting up the system, rather than relying solely on parameters like `--user-name`. This specific parameter accepts model names that have been registered within the [FastChat/conversation.py](https://github.com/lm-sys/FastChat/blob/main/fastchat/conversation.py) file, an example would be `llama-2`.
|
||||
|
||||
Then you can use llama.cpp as a drop-in replacement for OpenAI's **chat.completion** or **text_completion** APIs.
|
||||
|
||||
### Extending or building alternative Web Front End
|
||||
|
|
|
@ -1,18 +1,23 @@
|
|||
"""Flask server exposing an OpenAI-like API on top of llama.cpp's server.cpp.

flask-cors and fschat are optional dependencies:
  * flask-cors only enables cross-origin (browser) requests.
  * fschat (FastChat) only provides the conversation templates selected
    via --chat-prompt-model.
"""
import argparse
import urllib.parse
import json
import time

import requests
from flask import Flask, jsonify, request, Response

# fschat is optional; when absent, fall back to the plain
# --chat-prompt/--user-name/--ai-name prompt format.
try:
    from fastchat import conversation
except ImportError:
    conversation = None

app = Flask(__name__)

# flask-cors is optional; without it the server still works, but browsers
# will reject cross-origin requests.
try:
    from flask_cors import CORS
    CORS(app)
except ImportError:
    pass

parser = argparse.ArgumentParser(description="An example of using server.cpp with a similar API to OAI. It must be used together with server.cpp.")
# Name of a conversation template registered in FastChat's conversation.py
# (e.g. "llama-2"); empty string disables template use.
parser.add_argument("--chat-prompt-model", type=str, help="Set the model name of conversation template", default="")
parser.add_argument("--chat-prompt", type=str, help="the top prompt in chat completions(default: 'A chat between a curious user and an artificial intelligence assistant. The assistant follows the given rules no matter what.\\n')", default='A chat between a curious user and an artificial intelligence assistant. The assistant follows the given rules no matter what.\\n')
parser.add_argument("--user-name", type=str, help="USER name in chat completions(default: '\\nUSER: ')", default="\\nUSER: ")
parser.add_argument("--ai-name", type=str, help="ASSISTANT name in chat completions(default: '\\nASSISTANT: ')", default="\\nASSISTANT: ")
||||
|
@ -33,7 +38,7 @@ def is_present(json, key):
|
|||
return True
|
||||
|
||||
|
||||
# Use a FastChat conversation template only when a model name was supplied
# AND the optional fastchat package imported successfully (conversation is
# None when the import failed); otherwise the plain prompt format applies.
use_conversation_template = args.chat_prompt_model != "" and conversation is not None

if use_conversation_template:
    # Fetch the registered template (e.g. "llama-2") from FastChat.
    conv = conversation.get_conv_template(args.chat_prompt_model)
|
|
Loading…
Add table
Add a link
Reference in a new issue