diff --git a/colab.ipynb b/colab.ipynb
index 8d8076a65..a31a27abb 100644
--- a/colab.ipynb
+++ b/colab.ipynb
@@ -42,19 +42,9 @@
 "source": [
 "#@title v-- Enter your model below and then click this to start Koboldcpp\r\n",
- "Model = \"LLaMA2-13B-Tiefighter.Q4_K_M.gguf\" #@param [\"LLaMA2-13B-Tiefighter\",\"mythomax-l2-13b\",\"remm-slerp-l2-13b\",\"xwin-lm-13b-v0.2f\",\"stheno-l2-13b\",\"https://(YOUR_CUSTOM_MODEL_URL).gguf\"]{allow-input: true}\r\n",
+ "Model = \"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf\" #@param [\"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/MythoMax-L2-13B-GGUF/resolve/main/mythomax-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/ReMM-SLERP-L2-13B-GGUF/resolve/main/remm-slerp-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/Xwin-LM-13B-v0.2-GGUF/resolve/main/xwin-lm-13b-v0.2.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/Stheno-L2-13B-GGUF/resolve/main/stheno-l2-13b.Q4_K_M.gguf\",\"https://huggingface.co/TheBloke/Mistral-7B-Instruct-v0.1-GGUF\"]{allow-input: true}\r\n",
 "Layers = 43 #@param [43]{allow-input: true}\r\n",
- "ContextSize = 4096 #@param [4096] {allow-input: true}\r\n",
- "if Model == \"LLaMA2-13B-Tiefighter\":\n",
- " Model = \"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf\"\n",
- "elif Model == \"mythomax-l2-13b\":\n",
- " Model = \"https://huggingface.co/TheBloke/MythoMax-L2-13B-GGUF/resolve/main/mythomax-l2-13b.Q4_K_M.gguf\"\n",
- "elif Model == \"remm-slerp-l2-13b\":\n",
- " Model = \"https://huggingface.co/TheBloke/ReMM-SLERP-L2-13B-GGUF/resolve/main/remm-slerp-l2-13b.Q4_K_M.gguf\"\n",
- "elif Model == \"xwin-lm-13b-v0.2\":\n",
- " Model = \"https://huggingface.co/TheBloke/Xwin-LM-13B-v0.2-GGUF/resolve/main/xwin-lm-13b-v0.2.Q4_K_M.gguf\"\n",
- "elif Model == \"stheno-l2-13b\":\n",
- " Model = \"https://huggingface.co/TheBloke/Stheno-L2-13B-GGUF/resolve/main/stheno-l2-13b.Q4_K_M.gguf\"\n",
+ "ContextSize = 4096 #@param [4096] {allow-input: true}\r\n",
 "\r\n",
 "%cd /content\r\n",
 "!git clone https://github.com/LostRuins/koboldcpp\r\n",