From 879d1ba268695c0a4c02d6b473ecc7eb9e08f05e Mon Sep 17 00:00:00 2001 From: Concedo <39025047+LostRuins@users.noreply.github.com> Date: Sat, 28 Oct 2023 13:33:27 +0800 Subject: [PATCH] simplify colab dropdowns (+1 squashed commits) Squashed commits: [72aab0e8] simplify colab dropdown --- colab.ipynb | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/colab.ipynb b/colab.ipynb index 8fcb36fa2..8d8076a65 100644 --- a/colab.ipynb +++ b/colab.ipynb @@ -16,7 +16,7 @@ "source": [ "## Welcome to the Official KoboldCpp Colab Notebook\r\n", "It's really easy to get started. Just press the two **Play** buttons below, and then connect to the **Cloudflare URL** shown at the end. \r\n", - "You can select a model from the dropdown or enter a URL into the Manual URL field to use any GGUF model from Huggingface. Formating for a manual URL should look like `https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf`" + "You can select a model from the dropdown, or enter a **custom URL** to a GGUF model (Example: `https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf`)" ] }, { @@ -42,23 +42,19 @@ "source": [ "#@title v-- Enter your model below and then click this to start Koboldcpp\r\n", "\r\n", - "Model = \"LLaMA2-13B-Tiefighter.Q4_K_M.gguf\" #@param [\"LLaMA2-13B-Tiefighter.Q4_K_M.gguf\",\"mythomax-l2-13b.Q4_K_M.gguf\",\"remm-slerp-l2-13b.Q4_K_M.gguf\",\"xwin-lm-13b-v0.2.Q4_K_M.gguf\",\"stheno-l2-13b.Q4_K_M.gguf\"]{allow-input: true}\r\n", - "Manual_Link = False #@param {type:\"boolean\"}\r\n", - "Model_URL = \"OPTIONAL: Place URL Here\" #@param [\"\"]{allow-input: true}\r\n", + "Model = \"LLaMA2-13B-Tiefighter\" #@param [\"LLaMA2-13B-Tiefighter\",\"mythomax-l2-13b\",\"remm-slerp-l2-13b\",\"xwin-lm-13b-v0.2\",\"stheno-l2-13b\",\"https://(YOUR_CUSTOM_MODEL_URL).gguf\"]{allow-input: true}\r\n", "Layers = 43 #@param [43]{allow-input: 
true}\r\n", "ContextSize = 4096 #@param [4096] {allow-input: true}\r\n", - "if Model == \"LLaMA2-13B-Tiefighter.Q4_K_M.gguf\":\n", + "if Model == \"LLaMA2-13B-Tiefighter\":\n", " Model = \"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf\"\n", - "elif Model == \"mythomax-l2-13b.Q4_K_M.gguf\":\n", + "elif Model == \"mythomax-l2-13b\":\n", " Model = \"https://huggingface.co/TheBloke/MythoMax-L2-13B-GGUF/resolve/main/mythomax-l2-13b.Q4_K_M.gguf\"\n", - "elif Model == \"remm-slerp-l2-13b.Q4_K_M.gguf\":\n", + "elif Model == \"remm-slerp-l2-13b\":\n", " Model = \"https://huggingface.co/TheBloke/ReMM-SLERP-L2-13B-GGUF/resolve/main/remm-slerp-l2-13b.Q4_K_M.gguf\"\n", - "elif Model == \"xwin-lm-13b-v0.2.Q4_K_M.gguf\":\n", + "elif Model == \"xwin-lm-13b-v0.2\":\n", " Model = \"https://huggingface.co/TheBloke/Xwin-LM-13B-v0.2-GGUF/resolve/main/xwin-lm-13b-v0.2.Q4_K_M.gguf\"\n", - "elif Model == \"stheno-l2-13b.Q4_K_M.gguf\":\n", + "elif Model == \"stheno-l2-13b\":\n", " Model = \"https://huggingface.co/TheBloke/Stheno-L2-13B-GGUF/resolve/main/stheno-l2-13b.Q4_K_M.gguf\"\n", - "if Manual_Link:\n", - " Model = \"$Model_URL\"\n", "\r\n", "%cd /content\r\n", "!git clone https://github.com/LostRuins/koboldcpp\r\n",