simplify colab dropdowns (+1 squashed commits)

Squashed commits:

[72aab0e8] simplify colab dropdown
Concedo 2023-10-28 13:33:27 +08:00
parent eb9a93097b
commit 879d1ba268

@@ -16,7 +16,7 @@
 "source": [
 "## Welcome to the Official KoboldCpp Colab Notebook\r\n",
 "It's really easy to get started. Just press the two **Play** buttons below, and then connect to the **Cloudflare URL** shown at the end. \r\n",
-"You can select a model from the dropdown or enter a URL into the Manual URL field to use any GGUF model from Huggingface. Formating for a manual URL should look like `https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf`"
+"You can select a model from the dropdown, or enter a **custom URL** to a GGUF model (Example: `https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf`)"
 ]
 },
 {
@@ -42,23 +42,19 @@
 "source": [
 "#@title <b>v-- Enter your model below and then click this to start Koboldcpp</b>\r\n",
 "\r\n",
-"Model = \"LLaMA2-13B-Tiefighter.Q4_K_M.gguf\" #@param [\"LLaMA2-13B-Tiefighter.Q4_K_M.gguf\",\"mythomax-l2-13b.Q4_K_M.gguf\",\"remm-slerp-l2-13b.Q4_K_M.gguf\",\"xwin-lm-13b-v0.2.Q4_K_M.gguf\",\"stheno-l2-13b.Q4_K_M.gguf\"]{allow-input: true}\r\n",
-"Manual_Link = False #@param {type:\"boolean\"}\r\n",
-"Model_URL = \"OPTIONAL: Place URL Here\" #@param [\"\"]{allow-input: true}\r\n",
+"Model = \"LLaMA2-13B-Tiefighter.Q4_K_M.gguf\" #@param [\"LLaMA2-13B-Tiefighter\",\"mythomax-l2-13b\",\"remm-slerp-l2-13b\",\"xwin-lm-13b-v0.2f\",\"stheno-l2-13b\",\"https://(YOUR_CUSTOM_MODEL_URL).gguf\"]{allow-input: true}\r\n",
 "Layers = 43 #@param [43]{allow-input: true}\r\n",
 "ContextSize = 4096 #@param [4096] {allow-input: true}\r\n",
-"if Model == \"LLaMA2-13B-Tiefighter.Q4_K_M.gguf\":\n",
+"if Model == \"LLaMA2-13B-Tiefighter\":\n",
 " Model = \"https://huggingface.co/KoboldAI/LLaMA2-13B-Tiefighter-GGUF/resolve/main/LLaMA2-13B-Tiefighter.Q4_K_M.gguf\"\n",
-"elif Model == \"mythomax-l2-13b.Q4_K_M.gguf\":\n",
+"elif Model == \"mythomax-l2-13b\":\n",
 " Model = \"https://huggingface.co/TheBloke/MythoMax-L2-13B-GGUF/resolve/main/mythomax-l2-13b.Q4_K_M.gguf\"\n",
-"elif Model == \"remm-slerp-l2-13b.Q4_K_M.gguf\":\n",
+"elif Model == \"remm-slerp-l2-13b\":\n",
 " Model = \"https://huggingface.co/TheBloke/ReMM-SLERP-L2-13B-GGUF/resolve/main/remm-slerp-l2-13b.Q4_K_M.gguf\"\n",
-"elif Model == \"xwin-lm-13b-v0.2.Q4_K_M.gguf\":\n",
+"elif Model == \"xwin-lm-13b-v0.2\":\n",
 " Model = \"https://huggingface.co/TheBloke/Xwin-LM-13B-v0.2-GGUF/resolve/main/xwin-lm-13b-v0.2.Q4_K_M.gguf\"\n",
-"elif Model == \"stheno-l2-13b.Q4_K_M.gguf\":\n",
+"elif Model == \"stheno-l2-13b\":\n",
 " Model = \"https://huggingface.co/TheBloke/Stheno-L2-13B-GGUF/resolve/main/stheno-l2-13b.Q4_K_M.gguf\"\n",
-"if Manual_Link:\n",
-" Model = \"$Model_URL\"\n",
 "\r\n",
 "%cd /content\r\n",
 "!git clone https://github.com/LostRuins/koboldcpp\r\n",