ADD Chatbot UI example
parent fb62f92433
commit 098277cf5e
3 changed files with 115 additions and 0 deletions

examples/chatbot-ui/Dockerfile (new file, 70 additions)
@@ -0,0 +1,70 @@
FROM ubuntu

ENV NODE_VERSION=18.16.0
ENV NODE_DIR=/node
ENV NODE_PATH=${NODE_DIR}/lib/node_modules

ENV LLAMA_CPP_BRANCH=master
ENV GPT_LLAMA_CPP_BRANCH=master

ENV PATH=${NODE_DIR}/bin:${PATH}

# Make sure apt is non-interactive
RUN set -x \
    && echo 'debconf debconf/frontend select Noninteractive' \
    | debconf-set-selections

# Install deps
RUN set -x \
    && apt update --yes \
    && apt install --yes --no-install-recommends \
        ca-certificates \
        curl \
        g++ \
        gcc \
        git \
        make \
        python-is-python3 \
        python3-pip \
        xz-utils

# Install node
RUN set -x \
    && mkdir --parents "${NODE_DIR}" \
    && curl \
        --location \
        --output /tmp/node.tar.gz \
        "https://nodejs.org/dist/v${NODE_VERSION}/node-v${NODE_VERSION}-linux-x64.tar.gz" \
    && tar \
        --strip-components=1 \
        --ungzip \
        --extract \
        --file="/tmp/node.tar.gz" \
        --directory="${NODE_DIR}" \
    && rm -f /tmp/node.tar.gz

# Install LLaMA CPP
RUN set -x \
    && git clone \
        --branch "${LLAMA_CPP_BRANCH}" \
        --depth 1 \
        https://github.com/ggerganov/llama.cpp \
    && cd /llama.cpp \
    && make -j \
    && python -m pip install -r requirements.txt \
    && mkdir -p models

# Install GPT LLaMA CPP
RUN set -x \
    && git clone \
        --branch "${GPT_LLAMA_CPP_BRANCH}" \
        --depth 1 \
        https://github.com/keldenl/gpt-llama.cpp \
    && cd /gpt-llama.cpp \
    && npm install

EXPOSE 443
WORKDIR /gpt-llama.cpp
ENTRYPOINT ["/bin/bash", "-c"]
CMD ["npm start"]
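
The compose file below is the intended way to run this image, but it can also be built and smoke-tested on its own. A minimal sketch, in which the `gpt-llama-cpp` tag and the model path are placeholders rather than anything this commit defines:

    # build the image from the example directory
    docker build -t gpt-llama-cpp examples/chatbot-ui

    # run it with a model bind-mounted where the server expects it;
    # because the ENTRYPOINT is "/bin/bash -c", the final argument is
    # executed as a shell command string
    docker run --rm -p 443:443 \
        -v /PATH/TO/MODEL.bin:/llama.cpp/models/model.bin \
        gpt-llama-cpp "npm start"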

examples/chatbot-ui/README.md (new file, 19 additions)
@@ -0,0 +1,19 @@
# Chatbot UI with local LLMs

This example combines three projects, [llama.cpp](https://github.com/ggerganov/llama.cpp), [gpt-llama.cpp](https://github.com/keldenl/gpt-llama.cpp) and [Chatbot UI](https://github.com/mckaywrigley/chatbot-ui), to provide a ChatGPT-like UI experience on top of llama.cpp.

## How to use
1. Edit the volume bind in `compose.yaml` with the path to the model you wish to use:

       volumes:
         - type: bind
           source: /llm_models/something.ggml.q4_0.bin
           target: /llama.cpp/models/model.bin

1. Start the services with `docker-compose`:

       docker-compose up --build

1. When updating, rebuild without the cache and pull fresh base images so everything gets updated (`docker-compose up` itself does not accept `--no-cache` or `--pull-always`):

       docker-compose build --no-cache --pull
       docker-compose up --force-recreate
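
Once the stack is up, a quick smoke test from the host, assuming the default port mapping and nothing else bound to 3000:

    # Chatbot UI publishes port 3000; a successful response means the UI container is serving
    curl -sf http://localhost:3000 > /dev/null && echo "chatbot-ui is up"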

examples/chatbot-ui/compose.yaml (new file, 26 additions)
@@ -0,0 +1,26 @@
version: "3.9"
services:
  gpt_llama:
    privileged: true
    build: .
    volumes:
      - type: bind
        source: /PATH/TO/MODEL.bin
        target: /llama.cpp/models/model.bin
    command: ["npm start mlock threads 7"]
    networks:
      - llocal
  chatbot_ui:
    image: ghcr.io/mckaywrigley/chatbot-ui:main
    ports:
      - "3000:3000"
    environment:
      DEFAULT_MODEL: gpt-3.5-turbo
      DEFAULT_SYSTEM_PROMPT: You are ChatGPT, a large language model trained by OpenAI. Follow the user's instructions carefully. Respond using markdown.
      # gpt-llama.cpp repurposes the API key field to carry the model path
      OPENAI_API_KEY: /llama.cpp/models/model.bin
      OPENAI_API_HOST: http://gpt_llama:443
    networks:
      - llocal

networks:
  llocal: {}
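
The wiring above is worth spelling out: Chatbot UI sends OpenAI-style requests to `OPENAI_API_HOST`, and the model path travels in the API key field. A hand-rolled request that mimics this can be issued from inside the `gpt_llama` container (which has curl installed by the Dockerfile); the `/v1/chat/completions` route and the bearer-token-as-model-path convention are assumptions about gpt-llama.cpp's OpenAI-compatible API, not something this commit asserts:

    # mimic what Chatbot UI sends: an OpenAI-style chat request whose
    # Authorization header carries the model path instead of a real key
    docker-compose exec gpt_llama curl -s http://localhost:443/v1/chat/completions \
        -H "Content-Type: application/json" \
        -H "Authorization: Bearer /llama.cpp/models/model.bin" \
        -d '{"model": "gpt-3.5-turbo", "messages": [{"role": "user", "content": "Hello"}]}'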